From 8dc39179d7e224019e2ddc961e9fa3b675b21f7d Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Tue, 5 May 2020 14:55:40 +0300 Subject: [PATCH 001/115] Update docs. --- Doc/library/xml.etree.elementtree.rst | 2 ++ Doc/whatsnew/3.9.rst | 5 ----- Misc/NEWS.d/3.9.0a1.rst | 2 +- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index 658bc3a54f86e5..bdd0d40cab67d9 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -15,6 +15,8 @@ for parsing and creating XML data. .. versionchanged:: 3.3 This module will use a fast implementation whenever available. + +.. deprecated:: 3.3 The :mod:`xml.etree.cElementTree` module is deprecated. diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 11e577baa8fb5f..ac5331e0fdf7ea 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -670,11 +670,6 @@ Removed module have been removed. They were deprecated in Python 3.2. Use ``iter(x)`` or ``list(x)`` instead of ``x.getchildren()`` and ``x.iter()`` or ``list(x.iter())`` instead of ``x.getiterator()``. - The ``xml.etree.cElementTree`` module has been removed, - use the :mod:`xml.etree.ElementTree` module instead. - Since Python 3.3 the ``xml.etree.cElementTree`` module has been deprecated, - the ``xml.etree.ElementTree`` module uses a fast implementation whenever - available. (Contributed by Serhiy Storchaka in :issue:`36543`.) * The old :mod:`plistlib` API has been removed, it was deprecated since Python diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst index fb74d3622263d4..e5b4972b1922c4 100644 --- a/Misc/NEWS.d/3.9.0a1.rst +++ b/Misc/NEWS.d/3.9.0a1.rst @@ -3372,7 +3372,7 @@ markup and any values in the message. Patch by Paul Ganssle .. section: Library Removed methods Element.getchildren(), Element.getiterator() and -ElementTree.getiterator() and the xml.etree.cElementTree module. +ElementTree.getiterator(). .. 
From 4e01946cafca0cf49f796c3118e0d65237bcad69 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 15:43:37 +0200 Subject: [PATCH 002/115] bpo-40513: Per-interpreter signals pending (GH-19924) Move signals_pending from _PyRuntime.ceval to PyInterpreterState.ceval. --- Include/internal/pycore_interp.h | 2 ++ Include/internal/pycore_runtime.h | 2 -- Python/ceval.c | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 6e9937caa9dbff..251ee06ed4b006 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -43,6 +43,8 @@ struct _ceval_state { the fast path in the eval loop. */ _Py_atomic_int eval_breaker; struct _pending_calls pending; + /* Request for checking signals. */ + _Py_atomic_int signals_pending; }; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 995fe231c32149..d432c6cc5112ae 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -17,8 +17,6 @@ struct _ceval_runtime_state { int recursion_limit; /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; - /* Request for checking signals. 
*/ - _Py_atomic_int signals_pending; struct _gil_runtime_state gil; }; diff --git a/Python/ceval.c b/Python/ceval.c index e15d7e0b4603d2..addc0264b171a0 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -148,7 +148,7 @@ COMPUTE_EVAL_BREAKER(PyInterpreterState *interp, { _Py_atomic_store_relaxed(&ceval2->eval_breaker, _Py_atomic_load_relaxed(&ceval->gil_drop_request) - | (_Py_atomic_load_relaxed(&ceval->signals_pending) + | (_Py_atomic_load_relaxed(&ceval2->signals_pending) && _Py_ThreadCanHandleSignals(interp)) | (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do) && _Py_ThreadCanHandlePendingCalls()) @@ -201,7 +201,7 @@ SIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { struct _ceval_runtime_state *ceval = &interp->runtime->ceval; struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->signals_pending, 1); + _Py_atomic_store_relaxed(&ceval2->signals_pending, 1); /* eval_breaker is not set to 1 if thread_can_handle_signals() is false */ COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -212,7 +212,7 @@ UNSIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { struct _ceval_runtime_state *ceval = &interp->runtime->ceval; struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->signals_pending, 0); + _Py_atomic_store_relaxed(&ceval2->signals_pending, 0); COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -830,16 +830,16 @@ eval_frame_handle_pending(PyThreadState *tstate) { _PyRuntimeState * const runtime = &_PyRuntime; struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; /* Pending signals */ - if (_Py_atomic_load_relaxed(&ceval->signals_pending)) { + if (_Py_atomic_load_relaxed(&ceval2->signals_pending)) { if (handle_signals(tstate) != 0) { return -1; } } /* Pending calls */ - struct _ceval_state *ceval2 = &tstate->interp->ceval; if (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do)) { if (make_pending_calls(tstate) != 0) { return -1; From 
0b1e3307e24b0af45787ab6456535b8346e0239a Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 16:14:31 +0200 Subject: [PATCH 003/115] bpo-40513: Per-interpreter gil_drop_request (GH-19927) Move gil_drop_request member from _PyRuntimeState.ceval to PyInterpreterState.ceval. --- Include/internal/pycore_interp.h | 2 + Include/internal/pycore_runtime.h | 2 - Python/ceval.c | 75 +++++++++++++++---------------- Python/ceval_gil.h | 13 +++--- 4 files changed, 45 insertions(+), 47 deletions(-) diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 251ee06ed4b006..fafc72eb97a007 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -42,6 +42,8 @@ struct _ceval_state { /* This single variable consolidates all requests to break out of the fast path in the eval loop. */ _Py_atomic_int eval_breaker; + /* Request for dropping the GIL */ + _Py_atomic_int gil_drop_request; struct _pending_calls pending; /* Request for checking signals. */ _Py_atomic_int signals_pending; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index d432c6cc5112ae..c59733559167ad 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -15,8 +15,6 @@ extern "C" { struct _ceval_runtime_state { int recursion_limit; - /* Request for dropping the GIL */ - _Py_atomic_int gil_drop_request; struct _gil_runtime_state gil; }; diff --git a/Python/ceval.c b/Python/ceval.c index addc0264b171a0..6b002730c8d789 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -143,77 +143,70 @@ is_tstate_valid(PyThreadState *tstate) the GIL eventually anyway. 
*/ static inline void COMPUTE_EVAL_BREAKER(PyInterpreterState *interp, - struct _ceval_runtime_state *ceval, - struct _ceval_state *ceval2) + struct _ceval_state *ceval) { - _Py_atomic_store_relaxed(&ceval2->eval_breaker, + _Py_atomic_store_relaxed(&ceval->eval_breaker, _Py_atomic_load_relaxed(&ceval->gil_drop_request) - | (_Py_atomic_load_relaxed(&ceval2->signals_pending) + | (_Py_atomic_load_relaxed(&ceval->signals_pending) && _Py_ThreadCanHandleSignals(interp)) - | (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do) + | (_Py_atomic_load_relaxed(&ceval->pending.calls_to_do) && _Py_ThreadCanHandlePendingCalls()) - | ceval2->pending.async_exc); + | ceval->pending.async_exc); } static inline void SET_GIL_DROP_REQUEST(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; + struct _ceval_state *ceval = &interp->ceval; _Py_atomic_store_relaxed(&ceval->gil_drop_request, 1); - _Py_atomic_store_relaxed(&ceval2->eval_breaker, 1); + _Py_atomic_store_relaxed(&ceval->eval_breaker, 1); } static inline void RESET_GIL_DROP_REQUEST(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; + struct _ceval_state *ceval = &interp->ceval; _Py_atomic_store_relaxed(&ceval->gil_drop_request, 0); - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + COMPUTE_EVAL_BREAKER(interp, ceval); } static inline void SIGNAL_PENDING_CALLS(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval2->pending.calls_to_do, 1); - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + struct _ceval_state *ceval = &interp->ceval; + _Py_atomic_store_relaxed(&ceval->pending.calls_to_do, 1); + COMPUTE_EVAL_BREAKER(interp, ceval); } static inline void UNSIGNAL_PENDING_CALLS(PyInterpreterState *interp) { - struct _ceval_runtime_state 
*ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval2->pending.calls_to_do, 0); - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + struct _ceval_state *ceval = &interp->ceval; + _Py_atomic_store_relaxed(&ceval->pending.calls_to_do, 0); + COMPUTE_EVAL_BREAKER(interp, ceval); } static inline void SIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval2->signals_pending, 1); + struct _ceval_state *ceval = &interp->ceval; + _Py_atomic_store_relaxed(&ceval->signals_pending, 1); /* eval_breaker is not set to 1 if thread_can_handle_signals() is false */ - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + COMPUTE_EVAL_BREAKER(interp, ceval); } static inline void UNSIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval2->signals_pending, 0); - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + struct _ceval_state *ceval = &interp->ceval; + _Py_atomic_store_relaxed(&ceval->signals_pending, 0); + COMPUTE_EVAL_BREAKER(interp, ceval); } @@ -229,10 +222,9 @@ SIGNAL_ASYNC_EXC(PyInterpreterState *interp) static inline void UNSIGNAL_ASYNC_EXC(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - ceval2->pending.async_exc = 0; - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + struct _ceval_state *ceval = &interp->ceval; + ceval->pending.async_exc = 0; + COMPUTE_EVAL_BREAKER(interp, ceval); } @@ -357,17 +349,19 @@ PyEval_ReleaseLock(void) { _PyRuntimeState *runtime = &_PyRuntime; PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + struct _ceval_state *ceval2 = &tstate->interp->ceval; /* This function must succeed when the current thread state is 
NULL. We therefore avoid PyThreadState_Get() which dumps a fatal error in debug mode. */ - drop_gil(&runtime->ceval, tstate); + drop_gil(&runtime->ceval, ceval2, tstate); } void _PyEval_ReleaseLock(PyThreadState *tstate) { struct _ceval_runtime_state *ceval = &tstate->interp->runtime->ceval; - drop_gil(ceval, tstate); + struct _ceval_state *ceval2 = &tstate->interp->ceval; + drop_gil(ceval, ceval2, tstate); } void @@ -393,7 +387,9 @@ PyEval_ReleaseThread(PyThreadState *tstate) if (new_tstate != tstate) { Py_FatalError("wrong thread state"); } - drop_gil(&runtime->ceval, tstate); + struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; + drop_gil(ceval, ceval2, tstate); } #ifdef HAVE_FORK @@ -439,13 +435,14 @@ PyThreadState * PyEval_SaveThread(void) { _PyRuntimeState *runtime = &_PyRuntime; - struct _ceval_runtime_state *ceval = &runtime->ceval; PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, NULL); ensure_tstate_not_null(__func__, tstate); + struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; assert(gil_created(&ceval->gil)); - drop_gil(ceval, tstate); + drop_gil(ceval, ceval2, tstate); return tstate; } @@ -847,12 +844,12 @@ eval_frame_handle_pending(PyThreadState *tstate) } /* GIL drop request */ - if (_Py_atomic_load_relaxed(&ceval->gil_drop_request)) { + if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request)) { /* Give another thread a chance */ if (_PyThreadState_Swap(&runtime->gilstate, NULL) != tstate) { Py_FatalError("tstate mix-up"); } - drop_gil(ceval, tstate); + drop_gil(ceval, ceval2, tstate); /* Other threads may run now */ diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h index a025a9fad1248e..db47077d5c1ce1 100644 --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -141,7 +141,8 @@ static void recreate_gil(struct _gil_runtime_state *gil) } static void -drop_gil(struct _ceval_runtime_state *ceval, PyThreadState *tstate) 
+drop_gil(struct _ceval_runtime_state *ceval, struct _ceval_state *ceval2, + PyThreadState *tstate) { struct _gil_runtime_state *gil = &ceval->gil; if (!_Py_atomic_load_relaxed(&gil->locked)) { @@ -163,7 +164,7 @@ drop_gil(struct _ceval_runtime_state *ceval, PyThreadState *tstate) MUTEX_UNLOCK(gil->mutex); #ifdef FORCE_SWITCHING - if (_Py_atomic_load_relaxed(&ceval->gil_drop_request) && tstate != NULL) { + if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request) && tstate != NULL) { MUTEX_LOCK(gil->switch_mutex); /* Not switched yet => wait */ if (((PyThreadState*)_Py_atomic_load_relaxed(&gil->last_holder)) == tstate) @@ -226,6 +227,7 @@ take_gil(PyThreadState *tstate) assert(is_tstate_valid(tstate)); PyInterpreterState *interp = tstate->interp; struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; struct _gil_runtime_state *gil = &ceval->gil; /* Check that _PyEval_InitThreads() was called to create the lock */ @@ -289,12 +291,12 @@ take_gil(PyThreadState *tstate) in take_gil() while the main thread called wait_for_thread_shutdown() from Py_Finalize(). */ MUTEX_UNLOCK(gil->mutex); - drop_gil(ceval, tstate); + drop_gil(ceval, ceval2, tstate); PyThread_exit_thread(); } assert(is_tstate_valid(tstate)); - if (_Py_atomic_load_relaxed(&ceval->gil_drop_request)) { + if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request)) { RESET_GIL_DROP_REQUEST(interp); } else { @@ -303,8 +305,7 @@ take_gil(PyThreadState *tstate) handle signals. Note: RESET_GIL_DROP_REQUEST() calls COMPUTE_EVAL_BREAKER(). 
*/ - struct _ceval_state *ceval2 = &interp->ceval; - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + COMPUTE_EVAL_BREAKER(interp, ceval2); } /* Don't access tstate if the thread must exit */ From c5fa364f4ea836f25dd07cfb328152d40a568371 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 16:41:11 +0200 Subject: [PATCH 004/115] bpo-40514: Add --with-experimental-isolated-subinterpreters (GH-19926) Add --with-experimental-isolated-subinterpreters build option to configure: better isolate subinterpreters, experimental build mode. When used, force the usage of the libc malloc() memory allocator, since pymalloc relies on the unique global interpreter lock (GIL). --- .../2020-05-05-15-39-11.bpo-40514.bZZmuS.rst | 2 ++ Python/preconfig.c | 10 +++++++ configure | 28 +++++++++++++++++++ configure.ac | 17 +++++++++++ pyconfig.h.in | 3 ++ 5 files changed, 60 insertions(+) create mode 100644 Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst diff --git a/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst b/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst new file mode 100644 index 00000000000000..ab9062c28f4bb9 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst @@ -0,0 +1,2 @@ +Add ``--with-experimental-isolated-subinterpreters`` build option to +``configure``: better isolate subinterpreters, experimental build mode. diff --git a/Python/preconfig.c b/Python/preconfig.c index 262738fa57da56..fd94d7dda1c298 100644 --- a/Python/preconfig.c +++ b/Python/preconfig.c @@ -291,7 +291,17 @@ _PyPreConfig_InitCompatConfig(PyPreConfig *config) config->coerce_c_locale_warn = 0; config->dev_mode = -1; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + /* bpo-40512: pymalloc is not compatible with subinterpreters, + force usage of libc malloc() which is thread-safe. 
*/ +#ifdef Py_DEBUG + config->allocator = PYMEM_ALLOCATOR_MALLOC_DEBUG; +#else + config->allocator = PYMEM_ALLOCATOR_MALLOC; +#endif +#else config->allocator = PYMEM_ALLOCATOR_NOT_SET; +#endif #ifdef MS_WINDOWS config->legacy_windows_fs_encoding = -1; #endif diff --git a/configure b/configure index a8a35d0defc6b3..26e9aa9fe454e2 100755 --- a/configure +++ b/configure @@ -845,6 +845,7 @@ with_computed_gotos with_ensurepip with_openssl with_ssl_default_suites +with_experimental_isolated_subinterpreters ' ac_precious_vars='build_alias host_alias @@ -1575,6 +1576,9 @@ Optional Packages: leave OpenSSL's defaults untouched, STRING: use a custom string, PROTOCOL_SSLv2 ignores the setting, see Doc/library/ssl.rst + --with-experimental-isolated-subinterpreters + better isolate subinterpreters, experimental build + mode (default is no) Some influential environment variables: MACHDEP name for machine-dependent library files @@ -17489,6 +17493,30 @@ $as_echo "#define PY_SSL_DEFAULT_CIPHERS 1" >>confdefs.h fi +# --with-experimental-isolated-subinterpreters + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-experimental-isolated-subinterpreters" >&5 +$as_echo_n "checking for --with-experimental-isolated-subinterpreters... " >&6; } + +# Check whether --with-experimental-isolated-subinterpreters was given. 
+if test "${with_experimental_isolated_subinterpreters+set}" = set; then : + withval=$with_experimental_isolated_subinterpreters; +if test "$withval" != no +then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; }; + $as_echo "#define EXPERIMENTAL_ISOLATED_SUBINTERPRETERS 1" >>confdefs.h + +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; }; +fi +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + # generate output files ac_config_files="$ac_config_files Makefile.pre Misc/python.pc Misc/python-embed.pc Misc/python-config.sh" diff --git a/configure.ac b/configure.ac index f996051efc719e..acb6d4bfa8da10 100644 --- a/configure.ac +++ b/configure.ac @@ -5717,6 +5717,23 @@ AC_MSG_RESULT(python) AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1) ]) +# --with-experimental-isolated-subinterpreters +AH_TEMPLATE(EXPERIMENTAL_ISOLATED_SUBINTERPRETERS, + [Better isolate subinterpreters, experimental build mode.]) +AC_MSG_CHECKING(for --with-experimental-isolated-subinterpreters) +AC_ARG_WITH(experimental-isolated-subinterpreters, + AS_HELP_STRING([--with-experimental-isolated-subinterpreters], + [better isolate subinterpreters, experimental build mode (default is no)]), +[ +if test "$withval" != no +then + AC_MSG_RESULT(yes); + AC_DEFINE(EXPERIMENTAL_ISOLATED_SUBINTERPRETERS) +else + AC_MSG_RESULT(no); +fi], +[AC_MSG_RESULT(no)]) + # generate output files AC_CONFIG_FILES(Makefile.pre Misc/python.pc Misc/python-embed.pc Misc/python-config.sh) diff --git a/pyconfig.h.in b/pyconfig.h.in index 75ac368aadafec..c06c4958726c0f 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -38,6 +38,9 @@ /* Define if --enable-ipv6 is specified */ #undef ENABLE_IPV6 +/* Better isolate subinterpreters, experimental build mode. 
*/ +#undef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + /* Define to 1 if your system stores words within floats with the most significant word first */ #undef FLOAT_WORDS_BIGENDIAN From 627f7012353411590434a7d5777ddcbcc8d97fcd Mon Sep 17 00:00:00 2001 From: Javier Buzzi Date: Tue, 5 May 2020 10:49:57 -0400 Subject: [PATCH 005/115] bpo-32117: Updated Simpsons names in docs (GH-19737) `sally` is not a Simpsons character Automerge-Triggered-By: @gvanrossum --- Doc/whatsnew/3.8.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 6d2b0d905ff06e..fdfc0a8f472cd6 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -428,8 +428,8 @@ Other Language Changes lastname, *members = family.split() return lastname.upper(), *members - >>> parse('simpsons homer marge bart lisa sally') - ('SIMPSONS', 'homer', 'marge', 'bart', 'lisa', 'sally') + >>> parse('simpsons homer marge bart lisa maggie') + ('SIMPSONS', 'homer', 'marge', 'bart', 'lisa', 'maggie') (Contributed by David Cuthbert and Jordan Chapman in :issue:`32117`.) From 4e30ed3af06ae655f4cb8aad8cba21f341384250 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 16:52:52 +0200 Subject: [PATCH 006/115] bpo-40513: Per-interpreter recursion_limit (GH-19929) Move recursion_limit member from _PyRuntimeState.ceval to PyInterpreterState.ceval. * Py_SetRecursionLimit() now only sets _Py_CheckRecursionLimit of ceval.c if the current Python thread is part of the main interpreter. * Inline _Py_MakeEndRecCheck() into _Py_LeaveRecursiveCall(). * Convert _Py_RecursionLimitLowerWaterMark() macro into a static inline function. 
--- Include/internal/pycore_ceval.h | 24 +++++++++++++----------- Include/internal/pycore_interp.h | 1 + Include/internal/pycore_runtime.h | 1 - Python/ceval.c | 24 ++++++++++++++---------- 4 files changed, 28 insertions(+), 22 deletions(-) diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 2df796deade3a9..18c8f027af16e6 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -65,12 +65,12 @@ PyAPI_DATA(int) _Py_CheckRecursionLimit; /* With USE_STACKCHECK macro defined, trigger stack checks in _Py_CheckRecursiveCall() on every 64th call to Py_EnterRecursiveCall. */ static inline int _Py_MakeRecCheck(PyThreadState *tstate) { - return (++tstate->recursion_depth > _Py_CheckRecursionLimit + return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit || ++tstate->stackcheck_counter > 64); } #else static inline int _Py_MakeRecCheck(PyThreadState *tstate) { - return (++tstate->recursion_depth > _Py_CheckRecursionLimit); + return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit); } #endif @@ -90,20 +90,22 @@ static inline int _Py_EnterRecursiveCall_inline(const char *where) { #define Py_EnterRecursiveCall(where) _Py_EnterRecursiveCall_inline(where) - /* Compute the "lower-water mark" for a recursion limit. When * Py_LeaveRecursiveCall() is called with a recursion depth below this mark, * the overflowed flag is reset to 0. */ -#define _Py_RecursionLimitLowerWaterMark(limit) \ - (((limit) > 200) \ - ? 
((limit) - 50) \ - : (3 * ((limit) >> 2))) - -#define _Py_MakeEndRecCheck(x) \ - (--(x) < _Py_RecursionLimitLowerWaterMark(_Py_CheckRecursionLimit)) +static inline int _Py_RecursionLimitLowerWaterMark(int limit) { + if (limit > 200) { + return (limit - 50); + } + else { + return (3 * (limit >> 2)); + } +} static inline void _Py_LeaveRecursiveCall(PyThreadState *tstate) { - if (_Py_MakeEndRecCheck(tstate->recursion_depth)) { + tstate->recursion_depth--; + int limit = tstate->interp->ceval.recursion_limit; + if (tstate->recursion_depth < _Py_RecursionLimitLowerWaterMark(limit)) { tstate->overflowed = 0; } } diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index fafc72eb97a007..08291012365edc 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -33,6 +33,7 @@ struct _pending_calls { }; struct _ceval_state { + int recursion_limit; /* Records whether tracing is on for any thread. Counts the number of threads for which tstate->c_tracefunc is non-NULL, so if the value is 0, we know we don't have to check this thread's diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index c59733559167ad..8ca1dfbb3f0a6b 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -14,7 +14,6 @@ extern "C" { /* ceval state */ struct _ceval_runtime_state { - int recursion_limit; struct _gil_runtime_state gil; }; diff --git a/Python/ceval.c b/Python/ceval.c index 6b002730c8d789..601e21a2fccd29 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -699,7 +699,6 @@ int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT; void _PyEval_InitRuntimeState(struct _ceval_runtime_state *ceval) { - ceval->recursion_limit = Py_DEFAULT_RECURSION_LIMIT; _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT; _gil_initialize(&ceval->gil); } @@ -707,6 +706,8 @@ _PyEval_InitRuntimeState(struct _ceval_runtime_state *ceval) int _PyEval_InitState(struct _ceval_state *ceval) { + 
ceval->recursion_limit = Py_DEFAULT_RECURSION_LIMIT; + struct _pending_calls *pending = &ceval->pending; assert(pending->lock == NULL); @@ -730,16 +731,18 @@ _PyEval_FiniState(struct _ceval_state *ceval) int Py_GetRecursionLimit(void) { - struct _ceval_runtime_state *ceval = &_PyRuntime.ceval; - return ceval->recursion_limit; + PyThreadState *tstate = _PyThreadState_GET(); + return tstate->interp->ceval.recursion_limit; } void Py_SetRecursionLimit(int new_limit) { - struct _ceval_runtime_state *ceval = &_PyRuntime.ceval; - ceval->recursion_limit = new_limit; - _Py_CheckRecursionLimit = new_limit; + PyThreadState *tstate = _PyThreadState_GET(); + tstate->interp->ceval.recursion_limit = new_limit; + if (_Py_IsMainInterpreter(tstate)) { + _Py_CheckRecursionLimit = new_limit; + } } /* The function _Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall() @@ -750,8 +753,7 @@ Py_SetRecursionLimit(int new_limit) int _Py_CheckRecursiveCall(PyThreadState *tstate, const char *where) { - _PyRuntimeState *runtime = tstate->interp->runtime; - int recursion_limit = runtime->ceval.recursion_limit; + int recursion_limit = tstate->interp->ceval.recursion_limit; #ifdef USE_STACKCHECK tstate->stackcheck_counter = 0; @@ -760,8 +762,10 @@ _Py_CheckRecursiveCall(PyThreadState *tstate, const char *where) _PyErr_SetString(tstate, PyExc_MemoryError, "Stack overflow"); return -1; } - /* Needed for ABI backwards-compatibility (see bpo-31857) */ - _Py_CheckRecursionLimit = recursion_limit; + if (_Py_IsMainInterpreter(tstate)) { + /* Needed for ABI backwards-compatibility (see bpo-31857) */ + _Py_CheckRecursionLimit = recursion_limit; + } #endif if (tstate->recursion_critical) /* Somebody asked that we don't check for recursion. 
*/ From b0be6b3b94fbdf31b796adc19dc86a04a52b03e1 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 17:07:41 +0200 Subject: [PATCH 007/115] bpo-29587: _PyErr_ChainExceptions() checks exception (GH-19902) _PyErr_ChainExceptions() now ensures that the first parameter is an exception type, as done by _PyErr_SetObject(). * The following function now check PyExceptionInstance_Check() in an assertion using a new _PyBaseExceptionObject_cast() helper function: * PyException_GetTraceback(), PyException_SetTraceback() * PyException_GetCause(), PyException_SetCause() * PyException_GetContext(), PyException_SetContext() * PyExceptionClass_Name() now checks PyExceptionClass_Check() with an assertion. * Remove XXX comment and add gi_exc_state variable to _gen_throw(). * Remove comment from test_generators --- Lib/test/test_generators.py | 3 --- Objects/exceptions.c | 36 +++++++++++++++++++++++++----------- Objects/genobject.c | 18 +++++++++--------- Python/errors.c | 12 +++++++++++- 4 files changed, 45 insertions(+), 24 deletions(-) diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 5824ecd7c37e88..e0478011996807 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -342,9 +342,6 @@ def g(): try: yield except Exception: - # Without the `gi_exc_state.exc_type != Py_None` in - # _gen_throw(), this line was causing a crash ("Segmentation - # fault (core dumped)") on e.g. Fedora 32. 
raise RuntimeError gen = g() diff --git a/Objects/exceptions.c b/Objects/exceptions.c index ca917b436c4bb7..db5e3da12b00f3 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -304,22 +304,33 @@ static PyGetSetDef BaseException_getset[] = { }; +static inline PyBaseExceptionObject* +_PyBaseExceptionObject_cast(PyObject *exc) +{ + assert(PyExceptionInstance_Check(exc)); + return (PyBaseExceptionObject *)exc; +} + + PyObject * -PyException_GetTraceback(PyObject *self) { - PyBaseExceptionObject *base_self = (PyBaseExceptionObject *)self; +PyException_GetTraceback(PyObject *self) +{ + PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); Py_XINCREF(base_self->traceback); return base_self->traceback; } int -PyException_SetTraceback(PyObject *self, PyObject *tb) { - return BaseException_set_tb((PyBaseExceptionObject *)self, tb, NULL); +PyException_SetTraceback(PyObject *self, PyObject *tb) +{ + return BaseException_set_tb(_PyBaseExceptionObject_cast(self), tb, NULL); } PyObject * -PyException_GetCause(PyObject *self) { - PyObject *cause = ((PyBaseExceptionObject *)self)->cause; +PyException_GetCause(PyObject *self) +{ + PyObject *cause = _PyBaseExceptionObject_cast(self)->cause; Py_XINCREF(cause); return cause; } @@ -328,13 +339,15 @@ PyException_GetCause(PyObject *self) { void PyException_SetCause(PyObject *self, PyObject *cause) { - ((PyBaseExceptionObject *)self)->suppress_context = 1; - Py_XSETREF(((PyBaseExceptionObject *)self)->cause, cause); + PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); + base_self->suppress_context = 1; + Py_XSETREF(base_self->cause, cause); } PyObject * -PyException_GetContext(PyObject *self) { - PyObject *context = ((PyBaseExceptionObject *)self)->context; +PyException_GetContext(PyObject *self) +{ + PyObject *context = _PyBaseExceptionObject_cast(self)->context; Py_XINCREF(context); return context; } @@ -343,7 +356,7 @@ PyException_GetContext(PyObject *self) { void 
PyException_SetContext(PyObject *self, PyObject *context) { - Py_XSETREF(((PyBaseExceptionObject *)self)->context, context); + Py_XSETREF(_PyBaseExceptionObject_cast(self)->context, context); } #undef PyExceptionClass_Name @@ -351,6 +364,7 @@ PyException_SetContext(PyObject *self, PyObject *context) const char * PyExceptionClass_Name(PyObject *ob) { + assert(PyExceptionClass_Check(ob)); return ((PyTypeObject*)ob)->tp_name; } diff --git a/Objects/genobject.c b/Objects/genobject.c index b27fa929a26258..5b253edfdcd0f6 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -512,15 +512,15 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, } PyErr_Restore(typ, val, tb); - /* XXX It seems like we shouldn't have to check not equal to Py_None - here because exc_type should only ever be a class. But not including - this check was causing crashes on certain tests e.g. on Fedora. */ - if (gen->gi_exc_state.exc_type && gen->gi_exc_state.exc_type != Py_None) { - Py_INCREF(gen->gi_exc_state.exc_type); - Py_XINCREF(gen->gi_exc_state.exc_value); - Py_XINCREF(gen->gi_exc_state.exc_traceback); - _PyErr_ChainExceptions(gen->gi_exc_state.exc_type, - gen->gi_exc_state.exc_value, gen->gi_exc_state.exc_traceback); + + _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state; + if (gi_exc_state->exc_type != NULL && gi_exc_state->exc_type != Py_None) { + Py_INCREF(gi_exc_state->exc_type); + Py_XINCREF(gi_exc_state->exc_value); + Py_XINCREF(gi_exc_state->exc_traceback); + _PyErr_ChainExceptions(gi_exc_state->exc_type, + gi_exc_state->exc_value, + gi_exc_state->exc_traceback); } return gen_send_ex(gen, Py_None, 1, 0); diff --git a/Python/errors.c b/Python/errors.c index 9e53d050416ff1..f856a798eed1e5 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -107,7 +107,8 @@ _PyErr_SetObject(PyThreadState *tstate, PyObject *exception, PyObject *value) if (exception != NULL && !PyExceptionClass_Check(exception)) { _PyErr_Format(tstate, PyExc_SystemError, - "exception %R not a BaseException 
subclass", + "_PyErr_SetObject: " + "exception %R is not a BaseException subclass", exception); return; } @@ -484,6 +485,15 @@ _PyErr_ChainExceptions(PyObject *exc, PyObject *val, PyObject *tb) return; PyThreadState *tstate = _PyThreadState_GET(); + + if (!PyExceptionClass_Check(exc)) { + _PyErr_Format(tstate, PyExc_SystemError, + "_PyErr_ChainExceptions: " + "exception %R is not a BaseException subclass", + exc); + return; + } + if (_PyErr_Occurred(tstate)) { PyObject *exc2, *val2, *tb2; _PyErr_Fetch(tstate, &exc2, &val2, &tb2); From 6351d9e4400a77fe1fcbe4f03e5fb6620cca236d Mon Sep 17 00:00:00 2001 From: Hai Shi Date: Tue, 5 May 2020 10:20:38 -0500 Subject: [PATCH 008/115] bpo-40520: Remove redundant comment in pydebug.h (GH-19931) Automerge-Triggered-By: @corona10 --- Include/pydebug.h | 2 -- 1 file changed, 2 deletions(-) diff --git a/Include/pydebug.h b/Include/pydebug.h index bd4aafe3b49f83..78bcb118be4659 100644 --- a/Include/pydebug.h +++ b/Include/pydebug.h @@ -5,8 +5,6 @@ extern "C" { #endif -/* These global variable are defined in pylifecycle.c */ -/* XXX (ncoghlan): move these declarations to pylifecycle.h? */ PyAPI_DATA(int) Py_DebugFlag; PyAPI_DATA(int) Py_VerboseFlag; PyAPI_DATA(int) Py_QuietFlag; From 299b8c61e9d1a42b929b8deb1b05067876e191e6 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 17:40:18 +0200 Subject: [PATCH 009/115] Revert "bpo-40513: Per-interpreter signals pending (GH-19924)" (GH-19932) This reverts commit 4e01946cafca0cf49f796c3118e0d65237bcad69. 
--- Include/internal/pycore_interp.h | 2 - Include/internal/pycore_runtime.h | 5 +++ Python/ceval.c | 65 +++++++++++++++++-------------- Python/ceval_gil.h | 2 +- 4 files changed, 42 insertions(+), 32 deletions(-) diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 08291012365edc..5bf8998e673206 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -46,8 +46,6 @@ struct _ceval_state { /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; struct _pending_calls pending; - /* Request for checking signals. */ - _Py_atomic_int signals_pending; }; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 8ca1dfbb3f0a6b..34eb492b9f254f 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -14,6 +14,11 @@ extern "C" { /* ceval state */ struct _ceval_runtime_state { + /* Request for checking signals. It is shared by all interpreters (see + bpo-40513). Any thread of any interpreter can receive a signal, but only + the main thread of the main interpreter can handle signals: see + _Py_ThreadCanHandleSignals(). */ + _Py_atomic_int signals_pending; struct _gil_runtime_state gil; }; diff --git a/Python/ceval.c b/Python/ceval.c index 601e21a2fccd29..0c08a76f7d1130 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -143,70 +143,76 @@ is_tstate_valid(PyThreadState *tstate) the GIL eventually anyway. 
*/ static inline void COMPUTE_EVAL_BREAKER(PyInterpreterState *interp, - struct _ceval_state *ceval) + struct _ceval_runtime_state *ceval, + struct _ceval_state *ceval2) { - _Py_atomic_store_relaxed(&ceval->eval_breaker, - _Py_atomic_load_relaxed(&ceval->gil_drop_request) + _Py_atomic_store_relaxed(&ceval2->eval_breaker, + _Py_atomic_load_relaxed(&ceval2->gil_drop_request) | (_Py_atomic_load_relaxed(&ceval->signals_pending) && _Py_ThreadCanHandleSignals(interp)) - | (_Py_atomic_load_relaxed(&ceval->pending.calls_to_do) + | (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do) && _Py_ThreadCanHandlePendingCalls()) - | ceval->pending.async_exc); + | ceval2->pending.async_exc); } static inline void SET_GIL_DROP_REQUEST(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->gil_drop_request, 1); - _Py_atomic_store_relaxed(&ceval->eval_breaker, 1); + struct _ceval_state *ceval2 = &interp->ceval; + _Py_atomic_store_relaxed(&ceval2->gil_drop_request, 1); + _Py_atomic_store_relaxed(&ceval2->eval_breaker, 1); } static inline void RESET_GIL_DROP_REQUEST(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->gil_drop_request, 0); - COMPUTE_EVAL_BREAKER(interp, ceval); + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; + _Py_atomic_store_relaxed(&ceval2->gil_drop_request, 0); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } static inline void SIGNAL_PENDING_CALLS(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->pending.calls_to_do, 1); - COMPUTE_EVAL_BREAKER(interp, ceval); + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; + _Py_atomic_store_relaxed(&ceval2->pending.calls_to_do, 1); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } static inline void 
UNSIGNAL_PENDING_CALLS(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->pending.calls_to_do, 0); - COMPUTE_EVAL_BREAKER(interp, ceval); + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; + _Py_atomic_store_relaxed(&ceval2->pending.calls_to_do, 0); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } static inline void SIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; _Py_atomic_store_relaxed(&ceval->signals_pending, 1); /* eval_breaker is not set to 1 if thread_can_handle_signals() is false */ - COMPUTE_EVAL_BREAKER(interp, ceval); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } static inline void UNSIGNAL_PENDING_SIGNALS(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; _Py_atomic_store_relaxed(&ceval->signals_pending, 0); - COMPUTE_EVAL_BREAKER(interp, ceval); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -222,9 +228,10 @@ SIGNAL_ASYNC_EXC(PyInterpreterState *interp) static inline void UNSIGNAL_ASYNC_EXC(PyInterpreterState *interp) { - struct _ceval_state *ceval = &interp->ceval; - ceval->pending.async_exc = 0; - COMPUTE_EVAL_BREAKER(interp, ceval); + struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; + ceval2->pending.async_exc = 0; + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -349,11 +356,12 @@ PyEval_ReleaseLock(void) { _PyRuntimeState *runtime = &_PyRuntime; PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); - struct _ceval_state *ceval2 = &tstate->interp->ceval; /* This function must succeed when the current thread state is NULL. 
We therefore avoid PyThreadState_Get() which dumps a fatal error in debug mode. */ - drop_gil(&runtime->ceval, ceval2, tstate); + struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; + drop_gil(ceval, ceval2, tstate); } void @@ -435,7 +443,6 @@ PyThreadState * PyEval_SaveThread(void) { _PyRuntimeState *runtime = &_PyRuntime; - PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, NULL); ensure_tstate_not_null(__func__, tstate); @@ -831,16 +838,16 @@ eval_frame_handle_pending(PyThreadState *tstate) { _PyRuntimeState * const runtime = &_PyRuntime; struct _ceval_runtime_state *ceval = &runtime->ceval; - struct _ceval_state *ceval2 = &tstate->interp->ceval; /* Pending signals */ - if (_Py_atomic_load_relaxed(&ceval2->signals_pending)) { + if (_Py_atomic_load_relaxed(&ceval->signals_pending)) { if (handle_signals(tstate) != 0) { return -1; } } /* Pending calls */ + struct _ceval_state *ceval2 = &tstate->interp->ceval; if (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do)) { if (make_pending_calls(tstate) != 0) { return -1; diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h index db47077d5c1ce1..f25f8100732942 100644 --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -305,7 +305,7 @@ take_gil(PyThreadState *tstate) handle signals. Note: RESET_GIL_DROP_REQUEST() calls COMPUTE_EVAL_BREAKER(). */ - COMPUTE_EVAL_BREAKER(interp, ceval2); + COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } /* Don't access tstate if the thread must exit */ From 607b1027fec7b4a1602aab7df57795fbcec1c51b Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 18:50:30 +0200 Subject: [PATCH 010/115] bpo-40521: Disable Unicode caches in isolated subinterpreters (GH-19933) When Python is built in the experimental isolated subinterpreters mode, disable Unicode singletons and Unicode interned strings since they are shared by all interpreters. Temporary workaround until these caches are made per-interpreter. 
--- Objects/typeobject.c | 16 +++++++++ Objects/unicodeobject.c | 78 +++++++++++++++++++++++++++++++++-------- 2 files changed, 79 insertions(+), 15 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c index db0ae970090ba9..1565b90898605e 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -56,6 +56,11 @@ static size_t method_cache_misses = 0; static size_t method_cache_collisions = 0; #endif +/* bpo-40521: Interned strings are shared by all subinterpreters */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define INTERN_NAME_STRINGS +#endif + /* alphabetical order */ _Py_IDENTIFIER(__abstractmethods__); _Py_IDENTIFIER(__class__); @@ -3418,6 +3423,7 @@ type_setattro(PyTypeObject *type, PyObject *name, PyObject *value) if (name == NULL) return -1; } +#ifdef INTERN_NAME_STRINGS if (!PyUnicode_CHECK_INTERNED(name)) { PyUnicode_InternInPlace(&name); if (!PyUnicode_CHECK_INTERNED(name)) { @@ -3427,6 +3433,7 @@ type_setattro(PyTypeObject *type, PyObject *name, PyObject *value) return -1; } } +#endif } else { /* Will fail in _PyObject_GenericSetAttrWithDict. */ @@ -7531,10 +7538,17 @@ _PyTypes_InitSlotDefs(void) for (slotdef *p = slotdefs; p->name; p++) { /* Slots must be ordered by their offset in the PyHeapTypeObject. 
*/ assert(!p[1].name || p->offset <= p[1].offset); +#ifdef INTERN_NAME_STRINGS p->name_strobj = PyUnicode_InternFromString(p->name); if (!p->name_strobj || !PyUnicode_CHECK_INTERNED(p->name_strobj)) { return _PyStatus_NO_MEMORY(); } +#else + p->name_strobj = PyUnicode_FromString(p->name); + if (!p->name_strobj) { + return _PyStatus_NO_MEMORY(); + } +#endif } slotdefs_initialized = 1; return _PyStatus_OK(); @@ -7559,7 +7573,9 @@ update_slot(PyTypeObject *type, PyObject *name) int offset; assert(PyUnicode_CheckExact(name)); +#ifdef INTERN_NAME_STRINGS assert(PyUnicode_CHECK_INTERNED(name)); +#endif assert(slotdefs_initialized); pp = ptrs; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index aba7407533c4ed..18b9458721de18 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -198,6 +198,11 @@ extern "C" { # define OVERALLOCATE_FACTOR 4 #endif +/* bpo-40521: Interned strings are shared by all interpreters. */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define INTERNED_STRINGS +#endif + /* This dictionary holds all interned unicode strings. Note that references to strings in this dictionary are *not* counted in the string's ob_refcnt. When the interned string reaches a refcnt of 0 the string deallocation @@ -206,7 +211,9 @@ extern "C" { Another way to look at this is that to say that the actual reference count of a string is: s->ob_refcnt + (s->state ? 2 : 0) */ +#ifdef INTERNED_STRINGS static PyObject *interned = NULL; +#endif /* The empty Unicode object is shared to improve performance. */ static PyObject *unicode_empty = NULL; @@ -281,9 +288,16 @@ unicode_decode_utf8(const char *s, Py_ssize_t size, /* List of static strings. */ static _Py_Identifier *static_strings = NULL; +/* bpo-40521: Latin1 singletons are shared by all interpreters. */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define LATIN1_SINGLETONS +#endif + +#ifdef LATIN1_SINGLETONS /* Single character Unicode strings in the Latin-1 range are being shared as well. 
*/ static PyObject *unicode_latin1[256] = {NULL}; +#endif /* Fast detection of the most frequent whitespace characters */ const unsigned char _Py_ascii_whitespace[] = { @@ -662,6 +676,7 @@ unicode_result_ready(PyObject *unicode) return unicode_empty; } +#ifdef LATIN1_SINGLETONS if (length == 1) { const void *data = PyUnicode_DATA(unicode); int kind = PyUnicode_KIND(unicode); @@ -683,6 +698,7 @@ unicode_result_ready(PyObject *unicode) } } } +#endif assert(_PyUnicode_CheckConsistency(unicode, 1)); return unicode; @@ -1913,10 +1929,12 @@ unicode_dealloc(PyObject *unicode) case SSTATE_INTERNED_MORTAL: /* revive dead object temporarily for DelItem */ Py_SET_REFCNT(unicode, 3); +#ifdef INTERNED_STRINGS if (PyDict_DelItem(interned, unicode) != 0) { _PyErr_WriteUnraisableMsg("deletion of interned string failed", NULL); } +#endif break; case SSTATE_INTERNED_IMMORTAL: @@ -1944,15 +1962,18 @@ unicode_dealloc(PyObject *unicode) static int unicode_is_singleton(PyObject *unicode) { - PyASCIIObject *ascii = (PyASCIIObject *)unicode; - if (unicode == unicode_empty) + if (unicode == unicode_empty) { return 1; + } +#ifdef LATIN1_SINGLETONS + PyASCIIObject *ascii = (PyASCIIObject *)unicode; if (ascii->state.kind != PyUnicode_WCHAR_KIND && ascii->length == 1) { Py_UCS4 ch = PyUnicode_READ_CHAR(unicode, 0); if (ch < 256 && unicode_latin1[ch] == unicode) return 1; } +#endif return 0; } #endif @@ -2094,16 +2115,28 @@ unicode_write_cstr(PyObject *unicode, Py_ssize_t index, static PyObject* get_latin1_char(unsigned char ch) { - PyObject *unicode = unicode_latin1[ch]; + PyObject *unicode; + +#ifdef LATIN1_SINGLETONS + unicode = unicode_latin1[ch]; + if (unicode) { + Py_INCREF(unicode); + return unicode; + } +#endif + + unicode = PyUnicode_New(1, ch); if (!unicode) { - unicode = PyUnicode_New(1, ch); - if (!unicode) - return NULL; - PyUnicode_1BYTE_DATA(unicode)[0] = ch; - assert(_PyUnicode_CheckConsistency(unicode, 1)); - unicode_latin1[ch] = unicode; + return NULL; } + + 
PyUnicode_1BYTE_DATA(unicode)[0] = ch; + assert(_PyUnicode_CheckConsistency(unicode, 1)); + +#ifdef LATIN1_SINGLETONS Py_INCREF(unicode); + unicode_latin1[ch] = unicode; +#endif return unicode; } @@ -11270,7 +11303,6 @@ int _PyUnicode_EqualToASCIIId(PyObject *left, _Py_Identifier *right) { PyObject *right_uni; - Py_hash_t hash; assert(_PyUnicode_CHECK(left)); assert(right->string); @@ -11302,10 +11334,12 @@ _PyUnicode_EqualToASCIIId(PyObject *left, _Py_Identifier *right) if (PyUnicode_CHECK_INTERNED(left)) return 0; +#ifdef INTERNED_STRINGS assert(_PyUnicode_HASH(right_uni) != -1); - hash = _PyUnicode_HASH(left); + Py_hash_t hash = _PyUnicode_HASH(left); if (hash != -1 && hash != _PyUnicode_HASH(right_uni)) return 0; +#endif return unicode_compare_eq(left, right_uni); } @@ -15487,20 +15521,26 @@ void PyUnicode_InternInPlace(PyObject **p) { PyObject *s = *p; - PyObject *t; #ifdef Py_DEBUG assert(s != NULL); assert(_PyUnicode_CHECK(s)); #else - if (s == NULL || !PyUnicode_Check(s)) + if (s == NULL || !PyUnicode_Check(s)) { return; + } #endif + /* If it's a subclass, we don't really know what putting it in the interned dict might do. */ - if (!PyUnicode_CheckExact(s)) + if (!PyUnicode_CheckExact(s)) { return; - if (PyUnicode_CHECK_INTERNED(s)) + } + + if (PyUnicode_CHECK_INTERNED(s)) { return; + } + +#ifdef INTERNED_STRINGS if (interned == NULL) { interned = PyDict_New(); if (interned == NULL) { @@ -15508,22 +15548,28 @@ PyUnicode_InternInPlace(PyObject **p) return; } } + + PyObject *t; Py_ALLOW_RECURSION t = PyDict_SetDefault(interned, s, s); Py_END_ALLOW_RECURSION + if (t == NULL) { PyErr_Clear(); return; } + if (t != s) { Py_INCREF(t); Py_SETREF(*p, t); return; } + /* The two references in interned are not counted by refcnt. 
The deallocator will take care of this */ Py_SET_REFCNT(s, Py_REFCNT(s) - 2); _PyUnicode_STATE(s).interned = SSTATE_INTERNED_MORTAL; +#endif } void @@ -16109,9 +16155,11 @@ _PyUnicode_Fini(PyThreadState *tstate) Py_CLEAR(unicode_empty); +#ifdef LATIN1_SINGLETONS for (Py_ssize_t i = 0; i < 256; i++) { Py_CLEAR(unicode_latin1[i]); } +#endif _PyUnicode_ClearStaticStrings(); } From ac4bf424119d1300f57929120968e216a85d3a25 Mon Sep 17 00:00:00 2001 From: Steve Dower Date: Tue, 5 May 2020 18:45:35 +0100 Subject: [PATCH 011/115] bpo-40458: Increase reserved stack space to prevent overflow crash on Windows (GH-19845) --- .../NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst | 1 + PCbuild/python_uwp.vcxproj | 1 + PCbuild/pythonw_uwp.vcxproj | 1 + 3 files changed, 3 insertions(+) create mode 100644 Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst diff --git a/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst new file mode 100644 index 00000000000000..4dc1ff480df87a --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst @@ -0,0 +1 @@ +Increase reserved stack space to prevent overflow crash on Windows. 
diff --git a/PCbuild/python_uwp.vcxproj b/PCbuild/python_uwp.vcxproj index 5ff120a0da331a..fb27e9e71222e3 100644 --- a/PCbuild/python_uwp.vcxproj +++ b/PCbuild/python_uwp.vcxproj @@ -95,6 +95,7 @@ windowsapp.lib;%(AdditionalDependencies) Console + 2000000 diff --git a/PCbuild/pythonw_uwp.vcxproj b/PCbuild/pythonw_uwp.vcxproj index 828d0d1ccac217..e21e46a1b722ed 100644 --- a/PCbuild/pythonw_uwp.vcxproj +++ b/PCbuild/pythonw_uwp.vcxproj @@ -95,6 +95,7 @@ windowsapp.lib;%(AdditionalDependencies) Windows + 2000000 From b4b53868d7d6cd13505321d3802fd00865b25e05 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 19:55:29 +0200 Subject: [PATCH 012/115] bpo-40521: Disable free lists in subinterpreters (GH-19937) When Python is built with experimental isolated interpreters, disable tuple, dict and frame free lists. Temporary workaround until these caches are made per-interpreter. Add frame_alloc() and frame_get_builtins() subfunctions to simplify _PyFrame_New_NoTrack(). --- Objects/dictobject.c | 37 +++++++- Objects/frameobject.c | 197 ++++++++++++++++++++++++++---------------- Objects/tupleobject.c | 8 ++ 3 files changed, 162 insertions(+), 80 deletions(-) diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 9c35f3c3f14d01..fa35d16478f635 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -250,16 +250,26 @@ static uint64_t pydict_global_version = 0; #ifndef PyDict_MAXFREELIST #define PyDict_MAXFREELIST 80 #endif + +/* bpo-40521: dict free lists are shared by all interpreters. 
*/ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyDict_MAXFREELIST +# define PyDict_MAXFREELIST 0 +#endif + +#if PyDict_MAXFREELIST > 0 static PyDictObject *free_list[PyDict_MAXFREELIST]; static int numfree = 0; static PyDictKeysObject *keys_free_list[PyDict_MAXFREELIST]; static int numfreekeys = 0; +#endif #include "clinic/dictobject.c.h" void _PyDict_ClearFreeList(void) { +#if PyDict_MAXFREELIST > 0 while (numfree) { PyDictObject *op = free_list[--numfree]; assert(PyDict_CheckExact(op)); @@ -268,14 +278,17 @@ _PyDict_ClearFreeList(void) while (numfreekeys) { PyObject_FREE(keys_free_list[--numfreekeys]); } +#endif } /* Print summary info about the state of the optimized allocator */ void _PyDict_DebugMallocStats(FILE *out) { +#if PyDict_MAXFREELIST > 0 _PyDebugAllocatorStats(out, "free PyDictObject", numfree, sizeof(PyDictObject)); +#endif } @@ -553,10 +566,13 @@ static PyDictKeysObject *new_keys_object(Py_ssize_t size) es = sizeof(Py_ssize_t); } +#if PyDict_MAXFREELIST > 0 if (size == PyDict_MINSIZE && numfreekeys > 0) { dk = keys_free_list[--numfreekeys]; } - else { + else +#endif + { dk = PyObject_MALLOC(sizeof(PyDictKeysObject) + es * size + sizeof(PyDictKeyEntry) * usable); @@ -587,10 +603,12 @@ free_keys_object(PyDictKeysObject *keys) Py_XDECREF(entries[i].me_key); Py_XDECREF(entries[i].me_value); } +#if PyDict_MAXFREELIST > 0 if (keys->dk_size == PyDict_MINSIZE && numfreekeys < PyDict_MAXFREELIST) { keys_free_list[numfreekeys++] = keys; return; } +#endif PyObject_FREE(keys); } @@ -603,13 +621,16 @@ new_dict(PyDictKeysObject *keys, PyObject **values) { PyDictObject *mp; assert(keys != NULL); +#if PyDict_MAXFREELIST > 0 if (numfree) { mp = free_list[--numfree]; assert (mp != NULL); assert (Py_IS_TYPE(mp, &PyDict_Type)); _Py_NewReference((PyObject *)mp); } - else { + else +#endif + { mp = PyObject_GC_New(PyDictObject, &PyDict_Type); if (mp == NULL) { dictkeys_decref(keys); @@ -1258,12 +1279,15 @@ dictresize(PyDictObject *mp, Py_ssize_t minsize) 
#ifdef Py_REF_DEBUG _Py_RefTotal--; #endif +#if PyDict_MAXFREELIST > 0 if (oldkeys->dk_size == PyDict_MINSIZE && numfreekeys < PyDict_MAXFREELIST) { keys_free_list[numfreekeys++] = oldkeys; } - else { + else +#endif + { PyObject_FREE(oldkeys); } } @@ -2005,10 +2029,15 @@ dict_dealloc(PyDictObject *mp) assert(keys->dk_refcnt == 1); dictkeys_decref(keys); } - if (numfree < PyDict_MAXFREELIST && Py_IS_TYPE(mp, &PyDict_Type)) +#if PyDict_MAXFREELIST > 0 + if (numfree < PyDict_MAXFREELIST && Py_IS_TYPE(mp, &PyDict_Type)) { free_list[numfree++] = mp; + } else +#endif + { Py_TYPE(mp)->tp_free((PyObject *)mp); + } Py_TRASHCAN_END } diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 4f5054d32bb011..af32276c98b24a 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -556,11 +556,19 @@ static PyGetSetDef frame_getsetlist[] = { free_list. Else programs creating lots of cyclic trash involving frames could provoke free_list into growing without bound. */ +/* max value for numfree */ +#define PyFrame_MAXFREELIST 200 + +/* bpo-40521: frame free lists are shared by all interpreters. 
*/ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyFrame_MAXFREELIST +# define PyFrame_MAXFREELIST 0 +#endif +#if PyFrame_MAXFREELIST > 0 static PyFrameObject *free_list = NULL; static int numfree = 0; /* number of frames currently in free_list */ -/* max value for numfree */ -#define PyFrame_MAXFREELIST 200 +#endif static void _Py_HOT_FUNCTION frame_dealloc(PyFrameObject *f) @@ -590,15 +598,19 @@ frame_dealloc(PyFrameObject *f) Py_CLEAR(f->f_trace); co = f->f_code; - if (co->co_zombieframe == NULL) + if (co->co_zombieframe == NULL) { co->co_zombieframe = f; + } +#if PyFrame_MAXFREELIST > 0 else if (numfree < PyFrame_MAXFREELIST) { ++numfree; f->f_back = free_list; free_list = f; } - else +#endif + else { PyObject_GC_Del(f); + } Py_DECREF(co); Py_TRASHCAN_SAFE_END(f) @@ -759,98 +771,127 @@ PyTypeObject PyFrame_Type = { _Py_IDENTIFIER(__builtins__); -PyFrameObject* _Py_HOT_FUNCTION -_PyFrame_New_NoTrack(PyThreadState *tstate, PyCodeObject *code, - PyObject *globals, PyObject *locals) +static inline PyFrameObject* +frame_alloc(PyCodeObject *code) { - PyFrameObject *back = tstate->frame; PyFrameObject *f; - PyObject *builtins; - Py_ssize_t i; -#ifdef Py_DEBUG - if (code == NULL || globals == NULL || !PyDict_Check(globals) || - (locals != NULL && !PyMapping_Check(locals))) { - PyErr_BadInternalCall(); - return NULL; + f = code->co_zombieframe; + if (f != NULL) { + code->co_zombieframe = NULL; + _Py_NewReference((PyObject *)f); + assert(f->f_code == code); + return f; } + + Py_ssize_t ncells = PyTuple_GET_SIZE(code->co_cellvars); + Py_ssize_t nfrees = PyTuple_GET_SIZE(code->co_freevars); + Py_ssize_t extras = code->co_stacksize + code->co_nlocals + ncells + nfrees; +#if PyFrame_MAXFREELIST > 0 + if (free_list == NULL) #endif - if (back == NULL || back->f_globals != globals) { - builtins = _PyDict_GetItemIdWithError(globals, &PyId___builtins__); - if (builtins) { - if (PyModule_Check(builtins)) { - builtins = PyModule_GetDict(builtins); - assert(builtins != 
NULL); - } + { + f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras); + if (f == NULL) { + return NULL; } - if (builtins == NULL) { - if (PyErr_Occurred()) { + } +#if PyFrame_MAXFREELIST > 0 + else { + assert(numfree > 0); + --numfree; + f = free_list; + free_list = free_list->f_back; + if (Py_SIZE(f) < extras) { + PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras); + if (new_f == NULL) { + PyObject_GC_Del(f); return NULL; } - /* No builtins! Make up a minimal one - Give them 'None', at least. */ - builtins = PyDict_New(); - if (builtins == NULL || - PyDict_SetItemString( - builtins, "None", Py_None) < 0) - return NULL; + f = new_f; } - else - Py_INCREF(builtins); + _Py_NewReference((PyObject *)f); + } +#endif + f->f_code = code; + extras = code->co_nlocals + ncells + nfrees; + f->f_valuestack = f->f_localsplus + extras; + for (Py_ssize_t i=0; if_localsplus[i] = NULL; } - else { + f->f_locals = NULL; + f->f_trace = NULL; + return f; +} + + +static inline PyObject * +frame_get_builtins(PyFrameObject *back, PyObject *globals) +{ + PyObject *builtins; + + if (back != NULL && back->f_globals == globals) { /* If we share the globals, we share the builtins. Save a lookup and a call. 
*/ builtins = back->f_builtins; assert(builtins != NULL); Py_INCREF(builtins); + return builtins; } - if (code->co_zombieframe != NULL) { - f = code->co_zombieframe; - code->co_zombieframe = NULL; - _Py_NewReference((PyObject *)f); - assert(f->f_code == code); + + builtins = _PyDict_GetItemIdWithError(globals, &PyId___builtins__); + if (builtins != NULL && PyModule_Check(builtins)) { + builtins = PyModule_GetDict(builtins); + assert(builtins != NULL); } - else { - Py_ssize_t extras, ncells, nfrees; - ncells = PyTuple_GET_SIZE(code->co_cellvars); - nfrees = PyTuple_GET_SIZE(code->co_freevars); - extras = code->co_stacksize + code->co_nlocals + ncells + - nfrees; - if (free_list == NULL) { - f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, - extras); - if (f == NULL) { - Py_DECREF(builtins); - return NULL; - } - } - else { - assert(numfree > 0); - --numfree; - f = free_list; - free_list = free_list->f_back; - if (Py_SIZE(f) < extras) { - PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras); - if (new_f == NULL) { - PyObject_GC_Del(f); - Py_DECREF(builtins); - return NULL; - } - f = new_f; - } - _Py_NewReference((PyObject *)f); - } + if (builtins != NULL) { + Py_INCREF(builtins); + return builtins; + } + + if (PyErr_Occurred()) { + return NULL; + } + + /* No builtins! Make up a minimal one. + Give them 'None', at least. 
*/ + builtins = PyDict_New(); + if (builtins == NULL) { + return NULL; + } + if (PyDict_SetItemString(builtins, "None", Py_None) < 0) { + Py_DECREF(builtins); + return NULL; + } + return builtins; +} - f->f_code = code; - extras = code->co_nlocals + ncells + nfrees; - f->f_valuestack = f->f_localsplus + extras; - for (i=0; if_localsplus[i] = NULL; - f->f_locals = NULL; - f->f_trace = NULL; + +PyFrameObject* _Py_HOT_FUNCTION +_PyFrame_New_NoTrack(PyThreadState *tstate, PyCodeObject *code, + PyObject *globals, PyObject *locals) +{ +#ifdef Py_DEBUG + if (code == NULL || globals == NULL || !PyDict_Check(globals) || + (locals != NULL && !PyMapping_Check(locals))) { + PyErr_BadInternalCall(); + return NULL; + } +#endif + + PyFrameObject *back = tstate->frame; + PyObject *builtins = frame_get_builtins(back, globals); + if (builtins == NULL) { + return NULL; } + + PyFrameObject *f = frame_alloc(code); + if (f == NULL) { + Py_DECREF(builtins); + return NULL; + } + f->f_stacktop = f->f_valuestack; f->f_builtins = builtins; Py_XINCREF(back); @@ -1142,6 +1183,7 @@ PyFrame_LocalsToFast(PyFrameObject *f, int clear) void _PyFrame_ClearFreeList(void) { +#if PyFrame_MAXFREELIST > 0 while (free_list != NULL) { PyFrameObject *f = free_list; free_list = free_list->f_back; @@ -1149,6 +1191,7 @@ _PyFrame_ClearFreeList(void) --numfree; } assert(numfree == 0); +#endif } void @@ -1161,9 +1204,11 @@ _PyFrame_Fini(void) void _PyFrame_DebugMallocStats(FILE *out) { +#if PyFrame_MAXFREELIST > 0 _PyDebugAllocatorStats(out, "free PyFrameObject", numfree, sizeof(PyFrameObject)); +#endif } diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index f8648d24f1c876..c0b59c009a2e94 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -22,6 +22,12 @@ class tuple "PyTupleObject *" "&PyTuple_Type" #define PyTuple_MAXFREELIST 2000 /* Maximum number of tuples of each size to save */ #endif +/* bpo-40521: tuple free lists are shared by all interpreters. 
*/ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyTuple_MAXSAVESIZE +# define PyTuple_MAXSAVESIZE 0 +#endif + #if PyTuple_MAXSAVESIZE > 0 /* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty tuple () of which at most one instance will be allocated. @@ -248,7 +254,9 @@ tupledealloc(PyTupleObject *op) #endif } Py_TYPE(op)->tp_free((PyObject *)op); +#if PyTuple_MAXSAVESIZE > 0 done: +#endif Py_TRASHCAN_END } From e838a9324c1719bb917ca81ede8d766b5cb551f4 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 19:56:48 +0200 Subject: [PATCH 013/115] bpo-40522: _PyThreadState_Swap() sets autoTSSkey (GH-19939) In the experimental isolated subinterpreters build mode, _PyThreadState_GET() gets the autoTSSkey variable and _PyThreadState_Swap() sets the autoTSSkey variable. * Add _PyThreadState_GetTSS() * _PyRuntimeState_GetThreadState() and _PyThreadState_GET() return _PyThreadState_GetTSS() * PyEval_SaveThread() sets the autoTSSkey variable to current Python thread state rather than NULL. * eval_frame_handle_pending() doesn't check that _PyThreadState_Swap() result is NULL. * _PyThreadState_Swap() gets the current Python thread state with _PyThreadState_GetTSS() rather than _PyRuntimeGILState_GetThreadState(). * PyGILState_Ensure() no longer checks _PyEval_ThreadsInitialized() since it cannot access the current interpreter. 
--- Include/internal/pycore_pystate.h | 20 ++++++++++++++++++-- Python/ceval.c | 13 +++++++++++++ Python/pystate.c | 17 +++++++++++++++++ 3 files changed, 48 insertions(+), 2 deletions(-) diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index c82e8db905188c..d96ba31207001a 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -49,8 +49,18 @@ _Py_ThreadCanHandlePendingCalls(void) /* Variable and macro for in-line access to current thread and interpreter state */ -static inline PyThreadState* _PyRuntimeState_GetThreadState(_PyRuntimeState *runtime) { +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +PyAPI_FUNC(PyThreadState*) _PyThreadState_GetTSS(void); +#endif + +static inline PyThreadState* +_PyRuntimeState_GetThreadState(_PyRuntimeState *runtime) +{ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + return _PyThreadState_GetTSS(); +#else return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->gilstate.tstate_current); +#endif } /* Get the current Python thread state. @@ -62,8 +72,14 @@ static inline PyThreadState* _PyRuntimeState_GetThreadState(_PyRuntimeState *run The caller must hold the GIL. See also PyThreadState_Get() and PyThreadState_GET(). 
*/ -static inline PyThreadState *_PyThreadState_GET(void) { +static inline PyThreadState* +_PyThreadState_GET(void) +{ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + return _PyThreadState_GetTSS(); +#else return _PyRuntimeState_GetThreadState(&_PyRuntime); +#endif } /* Redefine PyThreadState_GET() as an alias to _PyThreadState_GET() */ diff --git a/Python/ceval.c b/Python/ceval.c index 0c08a76f7d1130..b5854d34464639 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -380,9 +380,13 @@ PyEval_AcquireThread(PyThreadState *tstate) take_gil(tstate); struct _gilstate_runtime_state *gilstate = &tstate->interp->runtime->gilstate; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + (void)_PyThreadState_Swap(gilstate, tstate); +#else if (_PyThreadState_Swap(gilstate, tstate) != NULL) { Py_FatalError("non-NULL old thread state"); } +#endif } void @@ -443,7 +447,12 @@ PyThreadState * PyEval_SaveThread(void) { _PyRuntimeState *runtime = &_PyRuntime; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyThreadState *old_tstate = _PyThreadState_GET(); + PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, old_tstate); +#else PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, NULL); +#endif ensure_tstate_not_null(__func__, tstate); struct _ceval_runtime_state *ceval = &runtime->ceval; @@ -866,9 +875,13 @@ eval_frame_handle_pending(PyThreadState *tstate) take_gil(tstate); +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + (void)_PyThreadState_Swap(&runtime->gilstate, tstate); +#else if (_PyThreadState_Swap(&runtime->gilstate, tstate) != NULL) { Py_FatalError("orphan tstate"); } +#endif } /* Check for asynchronous exception. 
*/ diff --git a/Python/pystate.c b/Python/pystate.c index dd95750027241b..119fe31a84ba12 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -956,6 +956,14 @@ _PyThreadState_DeleteExcept(_PyRuntimeState *runtime, PyThreadState *tstate) } +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +PyThreadState* +_PyThreadState_GetTSS(void) { + return PyThread_tss_get(&_PyRuntime.gilstate.autoTSSkey); +} +#endif + + PyThreadState * _PyThreadState_UncheckedGet(void) { @@ -975,7 +983,11 @@ PyThreadState_Get(void) PyThreadState * _PyThreadState_Swap(struct _gilstate_runtime_state *gilstate, PyThreadState *newts) { +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyThreadState *oldts = _PyThreadState_GetTSS(); +#else PyThreadState *oldts = _PyRuntimeGILState_GetThreadState(gilstate); +#endif _PyRuntimeGILState_SetThreadState(gilstate, newts); /* It should not be possible for more than one thread state @@ -993,6 +1005,9 @@ _PyThreadState_Swap(struct _gilstate_runtime_state *gilstate, PyThreadState *new Py_FatalError("Invalid thread state for this thread"); errno = err; } +#endif +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyThread_tss_set(&gilstate->autoTSSkey, newts); #endif return oldts; } @@ -1363,7 +1378,9 @@ PyGILState_Ensure(void) /* Ensure that _PyEval_InitThreads() and _PyGILState_Init() have been called by Py_Initialize() */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS assert(_PyEval_ThreadsInitialized(runtime)); +#endif assert(gilstate->autoInterpreterState); PyThreadState *tcur = (PyThreadState *)PyThread_tss_get(&gilstate->autoTSSkey); From 0dd5e7a718997da2026ed64fe054dc36cae4fee7 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 20:16:37 +0200 Subject: [PATCH 014/115] bpo-40513: new_interpreter() init GIL earlier (GH-19942) Fix also code to handle init_interp_main() failure. 
--- Python/pylifecycle.c | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 5726a559cfcb73..2149d8928d596d 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1575,19 +1575,19 @@ new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) } interp->config._isolated_interpreter = isolated_subinterpreter; - status = pycore_interp_init(tstate); + status = init_interp_create_gil(tstate); if (_PyStatus_EXCEPTION(status)) { goto error; } - status = init_interp_main(tstate); + status = pycore_interp_init(tstate); if (_PyStatus_EXCEPTION(status)) { goto error; } - status = init_interp_create_gil(tstate); + status = init_interp_main(tstate); if (_PyStatus_EXCEPTION(status)) { - return status; + goto error; } *tstate_p = tstate; From 7be4e350aadf93c4be5c97b7291d0db2b6bc1dc4 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 20:27:47 +0200 Subject: [PATCH 015/115] bpo-40513: Per-interpreter GIL (GH-19943) In the experimental isolated subinterpreters build mode, the GIL is now per-interpreter. Move gil from _PyRuntimeState.ceval to PyInterpreterState.ceval. new_interpreter() always get the config from the main interpreter. 
--- Include/internal/pycore_ceval.h | 4 +++ Include/internal/pycore_interp.h | 3 ++ Include/internal/pycore_runtime.h | 2 ++ Python/ceval.c | 48 +++++++++++++++++++++++++++++-- Python/ceval_gil.h | 24 ++++++++++++++-- Python/pylifecycle.c | 6 +++- 6 files changed, 82 insertions(+), 5 deletions(-) diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 18c8f027af16e6..368990099089fe 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -50,7 +50,11 @@ extern PyObject *_PyEval_EvalCode( PyObject *kwdefs, PyObject *closure, PyObject *name, PyObject *qualname); +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +extern int _PyEval_ThreadsInitialized(PyInterpreterState *interp); +#else extern int _PyEval_ThreadsInitialized(struct pyruntimestate *runtime); +#endif extern PyStatus _PyEval_InitGIL(PyThreadState *tstate); extern void _PyEval_FiniGIL(PyThreadState *tstate); diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 5bf8998e673206..26e7a473a12dc6 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -46,6 +46,9 @@ struct _ceval_state { /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; struct _pending_calls pending; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state gil; +#endif }; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 34eb492b9f254f..ebdc12b23a9ca6 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -19,7 +19,9 @@ struct _ceval_runtime_state { the main thread of the main interpreter can handle signals: see _Py_ThreadCanHandleSignals(). 
*/ _Py_atomic_int signals_pending; +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS struct _gil_runtime_state gil; +#endif }; /* GIL state */ diff --git a/Python/ceval.c b/Python/ceval.c index b5854d34464639..6435bd05446aa2 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -250,6 +250,21 @@ ensure_tstate_not_null(const char *func, PyThreadState *tstate) } +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +int +_PyEval_ThreadsInitialized(PyInterpreterState *interp) +{ + return gil_created(&interp->ceval.gil); +} + +int +PyEval_ThreadsInitialized(void) +{ + // Fatal error if there is no current interpreter + PyInterpreterState *interp = PyInterpreterState_Get(); + return _PyEval_ThreadsInitialized(interp); +} +#else int _PyEval_ThreadsInitialized(_PyRuntimeState *runtime) { @@ -262,18 +277,25 @@ PyEval_ThreadsInitialized(void) _PyRuntimeState *runtime = &_PyRuntime; return _PyEval_ThreadsInitialized(runtime); } +#endif PyStatus _PyEval_InitGIL(PyThreadState *tstate) { +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS if (!_Py_IsMainInterpreter(tstate)) { /* Currently, the GIL is shared by all interpreters, and only the main interpreter is responsible to create and destroy it. */ return _PyStatus_OK(); } +#endif +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; +#else struct _gil_runtime_state *gil = &tstate->interp->runtime->ceval.gil; +#endif assert(!gil_created(gil)); PyThread_init_thread(); @@ -288,14 +310,20 @@ _PyEval_InitGIL(PyThreadState *tstate) void _PyEval_FiniGIL(PyThreadState *tstate) { +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS if (!_Py_IsMainInterpreter(tstate)) { /* Currently, the GIL is shared by all interpreters, and only the main interpreter is responsible to create and destroy it. 
*/ return; } +#endif +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; +#else struct _gil_runtime_state *gil = &tstate->interp->runtime->ceval.gil; +#endif if (!gil_created(gil)) { /* First Py_InitializeFromConfig() call: the GIL doesn't exist yet: do nothing. */ @@ -413,13 +441,18 @@ PyEval_ReleaseThread(PyThreadState *tstate) void _PyEval_ReInitThreads(_PyRuntimeState *runtime) { + PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + ensure_tstate_not_null(__func__, tstate); + +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; +#else struct _gil_runtime_state *gil = &runtime->ceval.gil; +#endif if (!gil_created(gil)) { return; } recreate_gil(gil); - PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); - ensure_tstate_not_null(__func__, tstate); take_gil(tstate); @@ -457,7 +490,11 @@ PyEval_SaveThread(void) struct _ceval_runtime_state *ceval = &runtime->ceval; struct _ceval_state *ceval2 = &tstate->interp->ceval; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + assert(gil_created(&ceval2->gil)); +#else assert(gil_created(&ceval->gil)); +#endif drop_gil(ceval, ceval2, tstate); return tstate; } @@ -716,7 +753,9 @@ void _PyEval_InitRuntimeState(struct _ceval_runtime_state *ceval) { _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT; +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS _gil_initialize(&ceval->gil); +#endif } int @@ -731,6 +770,11 @@ _PyEval_InitState(struct _ceval_state *ceval) if (pending->lock == NULL) { return -1; } + +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + _gil_initialize(&ceval->gil); +#endif + return 0; } diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h index f25f8100732942..56944b89237fb4 100644 --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -144,7 +144,11 @@ static void drop_gil(struct _ceval_runtime_state *ceval, struct _ceval_state *ceval2, PyThreadState *tstate) { +#ifdef 
EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &ceval2->gil; +#else struct _gil_runtime_state *gil = &ceval->gil; +#endif if (!_Py_atomic_load_relaxed(&gil->locked)) { Py_FatalError("drop_gil: GIL is not locked"); } @@ -228,7 +232,11 @@ take_gil(PyThreadState *tstate) PyInterpreterState *interp = tstate->interp; struct _ceval_runtime_state *ceval = &interp->runtime->ceval; struct _ceval_state *ceval2 = &interp->ceval; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &ceval2->gil; +#else struct _gil_runtime_state *gil = &ceval->gil; +#endif /* Check that _PyEval_InitThreads() was called to create the lock */ assert(gil_created(gil)); @@ -320,10 +328,22 @@ take_gil(PyThreadState *tstate) void _PyEval_SetSwitchInterval(unsigned long microseconds) { - _PyRuntime.ceval.gil.interval = microseconds; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyInterpreterState *interp = PyInterpreterState_Get(); + struct _gil_runtime_state *gil = &interp->ceval.gil; +#else + struct _gil_runtime_state *gil = &_PyRuntime.ceval.gil; +#endif + gil->interval = microseconds; } unsigned long _PyEval_GetSwitchInterval() { - return _PyRuntime.ceval.gil.interval; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyInterpreterState *interp = PyInterpreterState_Get(); + struct _gil_runtime_state *gil = &interp->ceval.gil; +#else + struct _gil_runtime_state *gil = &_PyRuntime.ceval.gil; +#endif + return gil->interval; } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 2149d8928d596d..da66a82ada70a8 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1561,9 +1561,13 @@ new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) /* Copy the current interpreter config into the new interpreter */ const PyConfig *config; +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS if (save_tstate != NULL) { config = _PyInterpreterState_GetConfig(save_tstate->interp); - } else { + } + else +#endif + { /* No current thread 
state, copy from the main interpreter */ PyInterpreterState *main_interp = PyInterpreterState_Main(); config = _PyInterpreterState_GetConfig(main_interp); From fb2c7c4afbab0514352ab0246b0c0cc85d1bba53 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 5 May 2020 20:33:06 +0200 Subject: [PATCH 016/115] bpo-40513: _xxsubinterpreters.run_string() releases the GIL (GH-19944) In the experimental isolated subinterpreters build mode, _xxsubinterpreters.run_string() now releases the GIL. --- Modules/_xxsubinterpretersmodule.c | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index de11c090870f94..8a6fce9e0b4bd9 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1939,6 +1939,20 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, return -1; } +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + // Switch to interpreter. + PyThreadState *new_tstate = PyInterpreterState_ThreadHead(interp); + PyThreadState *save1 = PyEval_SaveThread(); + + (void)PyThreadState_Swap(new_tstate); + + // Run the script. + _sharedexception *exc = NULL; + int result = _run_script(interp, codestr, shared, &exc); + + // Switch back. + PyEval_RestoreThread(save1); +#else // Switch to interpreter. PyThreadState *save_tstate = NULL; if (interp != PyInterpreterState_Get()) { @@ -1956,6 +1970,7 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, if (save_tstate != NULL) { PyThreadState_Swap(save_tstate); } +#endif // Propagate any exception out to the caller. 
if (exc != NULL) { From c21c51235aa8061da6b0593d6f857f42fd92fd8b Mon Sep 17 00:00:00 2001 From: Curtis Bucher Date: Tue, 5 May 2020 12:40:56 -0700 Subject: [PATCH 017/115] bpo-40355: Improve error messages in ast.literal_eval with malformed Dict nodes (GH-19868) Co-authored-by: Pablo Galindo --- Lib/ast.py | 11 +++++++---- Lib/test/test_ast.py | 6 ++++++ .../Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst | 2 ++ 3 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst diff --git a/Lib/ast.py b/Lib/ast.py index 5c68c4a66e1dd0..7a43581c0e6ce6 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -62,11 +62,12 @@ def literal_eval(node_or_string): node_or_string = parse(node_or_string, mode='eval') if isinstance(node_or_string, Expression): node_or_string = node_or_string.body + def _raise_malformed_node(node): + raise ValueError(f'malformed node or string: {node!r}') def _convert_num(node): - if isinstance(node, Constant): - if type(node.value) in (int, float, complex): - return node.value - raise ValueError('malformed node or string: ' + repr(node)) + if not isinstance(node, Constant) or type(node.value) not in (int, float, complex): + _raise_malformed_node(node) + return node.value def _convert_signed_num(node): if isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)): operand = _convert_num(node.operand) @@ -88,6 +89,8 @@ def _convert(node): node.func.id == 'set' and node.args == node.keywords == []): return set() elif isinstance(node, Dict): + if len(node.keys) != len(node.values): + _raise_malformed_node(node) return dict(zip(map(_convert, node.keys), map(_convert, node.values))) elif isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)): diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 9063b3d2d7b744..a8a13fdcd7426e 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -965,6 +965,12 @@ def test_literal_eval_complex(self): self.assertRaises(ValueError, 
ast.literal_eval, '3+(0+6j)') self.assertRaises(ValueError, ast.literal_eval, '-(3+6j)') + def test_literal_eval_malformed_dict_nodes(self): + malformed = ast.Dict(keys=[ast.Constant(1), ast.Constant(2)], values=[ast.Constant(3)]) + self.assertRaises(ValueError, ast.literal_eval, malformed) + malformed = ast.Dict(keys=[ast.Constant(1)], values=[ast.Constant(2), ast.Constant(3)]) + self.assertRaises(ValueError, ast.literal_eval, malformed) + def test_bad_integer(self): # issue13436: Bad error message with invalid numeric values body = [ast.ImportFrom(module='time', diff --git a/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst new file mode 100644 index 00000000000000..81f9e937a2bff4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst @@ -0,0 +1,2 @@ +Improve error reporting in :func:`ast.literal_eval` in the presence of malformed :class:`ast.Dict` +nodes instead of silently ignoring any non-conforming elements. Patch by Curtis Bucher. 
From 1253c3ef70ea5632d32ae19579a14152db0d45c1 Mon Sep 17 00:00:00 2001 From: Dennis Sweeney <36520290+sweeneyde@users.noreply.github.com> Date: Tue, 5 May 2020 17:14:32 -0400 Subject: [PATCH 018/115] bpo-40504: Allow weakrefs to lru_cache objects (GH-19938) --- Lib/test/test_functools.py | 31 +++++++++++++++++++ .../2020-05-05-17-12-47.bpo-40504.EX6wPn.rst | 1 + Modules/_functoolsmodule.c | 7 ++++- 3 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 9503f4086b1cb9..b3893a15566fa6 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -14,6 +14,8 @@ import unittest import unittest.mock import os +import weakref +import gc from weakref import proxy import contextlib @@ -1938,6 +1940,35 @@ def f(): return 1 self.assertEqual(f.cache_parameters(), {'maxsize': 1000, "typed": True}) + def test_lru_cache_weakrefable(self): + @self.module.lru_cache + def test_function(x): + return x + + class A: + @self.module.lru_cache + def test_method(self, x): + return (self, x) + + @staticmethod + @self.module.lru_cache + def test_staticmethod(x): + return (self, x) + + refs = [weakref.ref(test_function), + weakref.ref(A.test_method), + weakref.ref(A.test_staticmethod)] + + for ref in refs: + self.assertIsNotNone(ref()) + + del A + del test_function + gc.collect() + + for ref in refs: + self.assertIsNone(ref()) + @py_functools.lru_cache() def py_cached_func(x, y): diff --git a/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst b/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst new file mode 100644 index 00000000000000..261a49e4329280 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst @@ -0,0 +1 @@ +:func:`functools.lru_cache` objects can now be the targets of weakrefs. 
\ No newline at end of file diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index fd4b4c268cc979..d158d3bae157b2 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -783,6 +783,7 @@ typedef struct lru_cache_object { Py_ssize_t misses; PyObject *cache_info_type; PyObject *dict; + PyObject *weakreflist; } lru_cache_object; static PyTypeObject lru_cache_type; @@ -1196,6 +1197,7 @@ lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw) Py_INCREF(cache_info_type); obj->cache_info_type = cache_info_type; obj->dict = NULL; + obj->weakreflist = NULL; return (PyObject *)obj; } @@ -1227,6 +1229,8 @@ lru_cache_dealloc(lru_cache_object *obj) lru_list_elem *list; /* bpo-31095: UnTrack is needed before calling any callbacks */ PyObject_GC_UnTrack(obj); + if (obj->weakreflist != NULL) + PyObject_ClearWeakRefs((PyObject*)obj); list = lru_cache_unlink_list(obj); Py_XDECREF(obj->cache); @@ -1384,7 +1388,8 @@ static PyTypeObject lru_cache_type = { (traverseproc)lru_cache_tp_traverse,/* tp_traverse */ (inquiry)lru_cache_tp_clear, /* tp_clear */ 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ + offsetof(lru_cache_object, weakreflist), + /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ lru_cache_methods, /* tp_methods */ From 96074de573f82fc66a2bd73c36905141a3f1d5c1 Mon Sep 17 00:00:00 2001 From: Pablo Galindo Date: Tue, 5 May 2020 22:58:19 +0100 Subject: [PATCH 019/115] bpo-40523: Add pass-throughs for hash() and reversed() to weakref.proxy objects (GH-19946) --- Lib/test/test_weakref.py | 20 +++++++++++++++++++ .../2020-05-05-20-36-15.bpo-40523.hKZVTB.rst | 2 ++ Objects/weakrefobject.c | 19 +++++++++++++++++- 3 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 563507fee3d7ea..56a42f055d0b54 100644 --- a/Lib/test/test_weakref.py +++ 
b/Lib/test/test_weakref.py @@ -411,6 +411,26 @@ def __iter__(self): # can be killed in the middle of the call "blech" in p + def test_proxy_reversed(self): + class MyObj: + def __len__(self): + return 3 + def __reversed__(self): + return iter('cba') + + obj = MyObj() + self.assertEqual("".join(reversed(weakref.proxy(obj))), "cba") + + def test_proxy_hash(self): + cool_hash = 299_792_458 + + class MyObj: + def __hash__(self): + return cool_hash + + obj = MyObj() + self.assertEqual(hash(weakref.proxy(obj)), cool_hash) + def test_getweakrefcount(self): o = C() ref1 = weakref.ref(o) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst new file mode 100644 index 00000000000000..14f05be59a1edd --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst @@ -0,0 +1,2 @@ +Add pass-throughs for :func:`hash` and :func:`reversed` to +:class:`weakref.proxy` objects. Patch by Pablo Galindo. 
diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index 9640d93aaf2daf..313e8abab5a25f 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -665,10 +665,12 @@ proxy_iternext(PyWeakReference *proxy) WRAP_METHOD(proxy_bytes, __bytes__) +WRAP_METHOD(proxy_reversed, __reversed__) static PyMethodDef proxy_methods[] = { {"__bytes__", proxy_bytes, METH_NOARGS}, + {"__reversed__", proxy_reversed, METH_NOARGS}, {NULL, NULL} }; @@ -730,6 +732,21 @@ static PyMappingMethods proxy_as_mapping = { }; +static Py_hash_t +proxy_hash(PyObject *self) +{ + PyWeakReference *proxy = (PyWeakReference *)self; + if (!proxy_checkref(proxy)) { + return -1; + } + PyObject *obj = PyWeakref_GET_OBJECT(proxy); + Py_INCREF(obj); + Py_hash_t res = PyObject_Hash(obj); + Py_DECREF(obj); + return res; +} + + PyTypeObject _PyWeakref_ProxyType = { PyVarObject_HEAD_INIT(&PyType_Type, 0) @@ -746,7 +763,7 @@ _PyWeakref_ProxyType = { &proxy_as_number, /* tp_as_number */ &proxy_as_sequence, /* tp_as_sequence */ &proxy_as_mapping, /* tp_as_mapping */ - 0, /* tp_hash */ + proxy_hash, /* tp_hash */ 0, /* tp_call */ proxy_str, /* tp_str */ proxy_getattr, /* tp_getattro */ From b9c46a2c2d7fc68457bff641f78932d66f5e5f59 Mon Sep 17 00:00:00 2001 From: Tim Peters Date: Tue, 5 May 2020 21:28:24 -0500 Subject: [PATCH 020/115] bpo-40480 "fnmatch" exponential execution time (GH-19908) bpo-40480: create different regexps in the presence of multiple `*` patterns to prevent fnmatch() from taking exponential time. --- Lib/fnmatch.py | 60 ++++++++++++++++--- Lib/test/test_fnmatch.py | 17 ++++++ .../2020-05-04-21-21-43.bpo-40480.mjldWa.rst | 1 + 3 files changed, 71 insertions(+), 7 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst diff --git a/Lib/fnmatch.py b/Lib/fnmatch.py index b98e6413295e1c..d7d915d51314da 100644 --- a/Lib/fnmatch.py +++ b/Lib/fnmatch.py @@ -77,15 +77,19 @@ def translate(pat): There is no way to quote meta-characters. 
""" + STAR = object() + res = [] + add = res.append i, n = 0, len(pat) - res = '' while i < n: c = pat[i] i = i+1 if c == '*': - res = res + '.*' + # compress consecutive `*` into one + if (not res) or res[-1] is not STAR: + add(STAR) elif c == '?': - res = res + '.' + add('.') elif c == '[': j = i if j < n and pat[j] == '!': @@ -95,7 +99,7 @@ def translate(pat): while j < n and pat[j] != ']': j = j+1 if j >= n: - res = res + '\\[' + add('\\[') else: stuff = pat[i:j] if '--' not in stuff: @@ -122,7 +126,49 @@ def translate(pat): stuff = '^' + stuff[1:] elif stuff[0] in ('^', '['): stuff = '\\' + stuff - res = '%s[%s]' % (res, stuff) + add(f'[{stuff}]') else: - res = res + re.escape(c) - return r'(?s:%s)\Z' % res + add(re.escape(c)) + assert i == n + + # Deal with STARs. + inp = res + res = [] + add = res.append + i, n = 0, len(inp) + # Fixed pieces at the start? + while i < n and inp[i] is not STAR: + add(inp[i]) + i += 1 + # Now deal with STAR fixed STAR fixed ... + # For an interior `STAR fixed` pairing, we want to do a minimal + # .*? match followed by `fixed`, with no possibility of backtracking. + # We can't spell that directly, but can trick it into working by matching + # .*?fixed + # in a lookahead assertion, save the matched part in a group, then + # consume that group via a backreference. If the overall match fails, + # the lookahead assertion won't try alternatives. So the translation is: + # (?=(P.*?fixed))(?P=name) + # Group names are created as needed: g1, g2, g3, ... 
+ groupnum = 0 + while i < n: + assert inp[i] is STAR + i += 1 + if i == n: + add(".*") + break + assert inp[i] is not STAR + fixed = [] + while i < n and inp[i] is not STAR: + fixed.append(inp[i]) + i += 1 + fixed = "".join(fixed) + if i == n: + add(".*") + add(fixed) + else: + groupnum += 1 + add(f"(?=(?P.*?{fixed}))(?P=g{groupnum})") + assert i == n + res = "".join(res) + return fr'(?s:{res})\Z' diff --git a/Lib/test/test_fnmatch.py b/Lib/test/test_fnmatch.py index 55f9f0d3a5425a..4c173069503cc6 100644 --- a/Lib/test/test_fnmatch.py +++ b/Lib/test/test_fnmatch.py @@ -45,6 +45,13 @@ def test_fnmatch(self): check('\nfoo', 'foo*', False) check('\n', '*') + def test_slow_fnmatch(self): + check = self.check_match + check('a' * 50, '*a*a*a*a*a*a*a*a*a*a') + # The next "takes forever" if the regexp translation is + # straightforward. See bpo-40480. + check('a' * 50 + 'b', '*a*a*a*a*a*a*a*a*a*a', False) + def test_mix_bytes_str(self): self.assertRaises(TypeError, fnmatch, 'test', b'*') self.assertRaises(TypeError, fnmatch, b'test', '*') @@ -107,6 +114,16 @@ def test_translate(self): self.assertEqual(translate('[!x]'), r'(?s:[^x])\Z') self.assertEqual(translate('[^x]'), r'(?s:[\^x])\Z') self.assertEqual(translate('[x'), r'(?s:\[x)\Z') + # from the docs + self.assertEqual(translate('*.txt'), r'(?s:.*\.txt)\Z') + # squash consecutive stars + self.assertEqual(translate('*********'), r'(?s:.*)\Z') + self.assertEqual(translate('A*********'), r'(?s:A.*)\Z') + self.assertEqual(translate('*********A'), r'(?s:.*A)\Z') + self.assertEqual(translate('A*********?[?]?'), r'(?s:A.*.[?].)\Z') + # fancy translation to prevent exponential-time match failure + self.assertEqual(translate('**a*a****a'), + r'(?s:(?=(?P.*?a))(?P=g1)(?=(?P.*?a))(?P=g2).*a)\Z') class FilterTestCase(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst b/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst new file mode 100644 index 
00000000000000..d046b1422419d7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst @@ -0,0 +1 @@ +``fnmatch.fnmatch()`` could take exponential time in the presence of multiple ``*`` pattern characters. This was repaired by generating more elaborate regular expressions to avoid futile backtracking. \ No newline at end of file From d60040ba226bd2e3b6f58d074015aa2499dc1cb8 Mon Sep 17 00:00:00 2001 From: Batuhan Taskaya Date: Wed, 6 May 2020 08:24:39 +0300 Subject: [PATCH 021/115] bpo-40517: Implement syntax highlighting support for ASDL (#19928) --- Doc/conf.py | 3 +- Doc/library/ast.rst | 2 +- Doc/tools/extensions/asdl_highlight.py | 51 ++++++++++++++++++++++++++ 3 files changed, 54 insertions(+), 2 deletions(-) create mode 100644 Doc/tools/extensions/asdl_highlight.py diff --git a/Doc/conf.py b/Doc/conf.py index 32db34344a70a1..12d74ea24ce4ac 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -14,7 +14,8 @@ # --------------------- extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest', - 'pyspecific', 'c_annotations', 'escape4chm'] + 'pyspecific', 'c_annotations', 'escape4chm', + 'asdl_highlight'] doctest_global_setup = ''' diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index fc04114949c0c3..6c6ad01b842c8e 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -35,7 +35,7 @@ Abstract Grammar The abstract grammar is currently defined as follows: .. 
literalinclude:: ../../Parser/Python.asdl - :language: none + :language: asdl Node classes diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/asdl_highlight.py new file mode 100644 index 00000000000000..9b003e9905c56c --- /dev/null +++ b/Doc/tools/extensions/asdl_highlight.py @@ -0,0 +1,51 @@ +import os +import sys +sys.path.append(os.path.abspath("../Parser/")) + +from pygments.lexer import RegexLexer, bygroups, include, words +from pygments.token import (Comment, Generic, Keyword, Name, Operator, + Punctuation, Text) + +from asdl import builtin_types +from sphinx.highlighting import lexers + +class ASDLLexer(RegexLexer): + name = "ASDL" + aliases = ["asdl"] + filenames = ["*.asdl"] + _name = r"([^\W\d]\w*)" + _text_ws = r"(\s*)" + + tokens = { + "ws": [ + (r"\n", Text), + (r"\s+", Text), + (r"--.*?$", Comment.Singleline), + ], + "root": [ + include("ws"), + ( + r"(module)" + _text_ws + _name, + bygroups(Keyword, Text, Name.Class), + ), + ( + r"(\w+)(\*\s|\?\s|\s)(\w+)", + bygroups(Name.Variable, Generic.Strong, Name.Tag), + ), + (words(builtin_types), Keyword.Type), + (r"attributes", Name.Builtin), + ( + _name + _text_ws + "(=)", + bygroups(Name.Variable, Text, Operator), + ), + (_name, Name.Function), + (r"\|", Operator), + (r"{|}|\(|\)", Punctuation), + (r".", Text), + ], + } + + +def setup(app): + lexers["asdl"] = ASDLLexer() + return {'version': '1.0', 'parallel_read_safe': True} From eff870b618ca6f6b7a60a271f15af7e54b8a1b97 Mon Sep 17 00:00:00 2001 From: Raymond Hettinger Date: Tue, 5 May 2020 22:33:55 -0700 Subject: [PATCH 022/115] Revert "bpo-40517: Implement syntax highlighting support for ASDL (#19928)" (#19950) This reverts commit d60040ba226bd2e3b6f58d074015aa2499dc1cb8. 
--- Doc/conf.py | 3 +- Doc/library/ast.rst | 2 +- Doc/tools/extensions/asdl_highlight.py | 51 -------------------------- 3 files changed, 2 insertions(+), 54 deletions(-) delete mode 100644 Doc/tools/extensions/asdl_highlight.py diff --git a/Doc/conf.py b/Doc/conf.py index 12d74ea24ce4ac..32db34344a70a1 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -14,8 +14,7 @@ # --------------------- extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest', - 'pyspecific', 'c_annotations', 'escape4chm', - 'asdl_highlight'] + 'pyspecific', 'c_annotations', 'escape4chm'] doctest_global_setup = ''' diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index 6c6ad01b842c8e..fc04114949c0c3 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -35,7 +35,7 @@ Abstract Grammar The abstract grammar is currently defined as follows: .. literalinclude:: ../../Parser/Python.asdl - :language: asdl + :language: none Node classes diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/asdl_highlight.py deleted file mode 100644 index 9b003e9905c56c..00000000000000 --- a/Doc/tools/extensions/asdl_highlight.py +++ /dev/null @@ -1,51 +0,0 @@ -import os -import sys -sys.path.append(os.path.abspath("../Parser/")) - -from pygments.lexer import RegexLexer, bygroups, include, words -from pygments.token import (Comment, Generic, Keyword, Name, Operator, - Punctuation, Text) - -from asdl import builtin_types -from sphinx.highlighting import lexers - -class ASDLLexer(RegexLexer): - name = "ASDL" - aliases = ["asdl"] - filenames = ["*.asdl"] - _name = r"([^\W\d]\w*)" - _text_ws = r"(\s*)" - - tokens = { - "ws": [ - (r"\n", Text), - (r"\s+", Text), - (r"--.*?$", Comment.Singleline), - ], - "root": [ - include("ws"), - ( - r"(module)" + _text_ws + _name, - bygroups(Keyword, Text, Name.Class), - ), - ( - r"(\w+)(\*\s|\?\s|\s)(\w+)", - bygroups(Name.Variable, Generic.Strong, Name.Tag), - ), - (words(builtin_types), Keyword.Type), - (r"attributes", Name.Builtin), - ( - _name + 
_text_ws + "(=)", - bygroups(Name.Variable, Text, Operator), - ), - (_name, Name.Function), - (r"\|", Operator), - (r"{|}|\(|\)", Punctuation), - (r".", Text), - ], - } - - -def setup(app): - lexers["asdl"] = ASDLLexer() - return {'version': '1.0', 'parallel_read_safe': True} From 2668a9a5aa506a048aef7b4881c8dcf6b81c6870 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 6 May 2020 15:22:17 +0200 Subject: [PATCH 023/115] bpo-40527: Fix command line argument parsing (GH-19955) --- Lib/test/test_cmd_line.py | 11 +++++++++ .../2020-05-06-14-52-35.bpo-40527.gTNKuy.rst | 2 ++ Python/getopt.c | 23 +++++++++++++------ 3 files changed, 29 insertions(+), 7 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index ee96473322dba0..724402533038d4 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -756,6 +756,17 @@ def test_argv0_normalization(self): self.assertEqual(proc.returncode, 0, proc) self.assertEqual(proc.stdout.strip(), b'0') + def test_parsing_error(self): + args = [sys.executable, '-I', '--unknown-option'] + proc = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True) + err_msg = "unknown option --unknown-option\nusage: " + self.assertTrue(proc.stderr.startswith(err_msg), proc.stderr) + self.assertNotEqual(proc.returncode, 0) + + @unittest.skipIf(interpreter_requires_environment(), 'Cannot run -I tests when PYTHON env vars are required.') class IgnoreEnvironmentTest(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst new file mode 100644 index 00000000000000..19b8888230c659 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst @@ -0,0 +1,2 @@ +Fix command line argument parsing: no longer write errors multiple 
times +into stderr. diff --git a/Python/getopt.c b/Python/getopt.c index 708d9ce496287c..2e3891aae2d16a 100644 --- a/Python/getopt.c +++ b/Python/getopt.c @@ -101,7 +101,9 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) if (option == L'-') { // Parse long option. if (*opt_ptr == L'\0') { - fprintf(stderr, "expected long option\n"); + if (_PyOS_opterr) { + fprintf(stderr, "expected long option\n"); + } return -1; } *longindex = 0; @@ -111,7 +113,9 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) break; } if (!opt->name) { - fprintf(stderr, "unknown option %ls\n", argv[_PyOS_optind - 1]); + if (_PyOS_opterr) { + fprintf(stderr, "unknown option %ls\n", argv[_PyOS_optind - 1]); + } return '_'; } opt_ptr = L""; @@ -119,8 +123,10 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) return opt->val; } if (_PyOS_optind >= argc) { - fprintf(stderr, "Argument expected for the %ls options\n", - argv[_PyOS_optind - 1]); + if (_PyOS_opterr) { + fprintf(stderr, "Argument expected for the %ls options\n", + argv[_PyOS_optind - 1]); + } return '_'; } _PyOS_optarg = argv[_PyOS_optind++]; @@ -128,14 +134,16 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) } if (option == 'J') { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "-J is reserved for Jython\n"); + } return '_'; } if ((ptr = wcschr(SHORT_OPTS, option)) == NULL) { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "Unknown option: -%c\n", (char)option); + } return '_'; } @@ -147,9 +155,10 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) else { if (_PyOS_optind >= argc) { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "Argument expected for the -%c option\n", (char)option); + } return '_'; } From 091951a67c832db83c60f4eb22f1fb474b70e635 Mon Sep 17 00:00:00 2001 From: Batuhan Taskaya Date: Wed, 6 May 2020 17:29:32 +0300 Subject: [PATCH 024/115] bpo-40528: Improve and 
clear several aspects of the ASDL definition code for the AST (GH-19952) --- Include/asdl.h | 2 - Lib/test/test_ast.py | 2 +- Parser/Python.asdl | 4 +- Parser/asdl.py | 3 +- Parser/asdl_c.py | 10 +-- Python/Python-ast.c | 155 +++++++++++++++++++------------------------ 6 files changed, 75 insertions(+), 101 deletions(-) diff --git a/Include/asdl.h b/Include/asdl.h index 549df2ace7555e..e962560bcd4cbe 100644 --- a/Include/asdl.h +++ b/Include/asdl.h @@ -4,9 +4,7 @@ typedef PyObject * identifier; typedef PyObject * string; -typedef PyObject * bytes; typedef PyObject * object; -typedef PyObject * singleton; typedef PyObject * constant; /* It would be nice if the code generated by asdl_c.py was completely diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index a8a13fdcd7426e..6b71adac4e4a6b 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -597,7 +597,7 @@ def test_empty_yield_from(self): empty_yield_from.body[0].body[0].value.value = None with self.assertRaises(ValueError) as cm: compile(empty_yield_from, "", "exec") - self.assertIn("field value is required", str(cm.exception)) + self.assertIn("field 'value' is required", str(cm.exception)) @support.cpython_only def test_issue31592(self): diff --git a/Parser/Python.asdl b/Parser/Python.asdl index f789f1da456e91..889712b4b3d36e 100644 --- a/Parser/Python.asdl +++ b/Parser/Python.asdl @@ -1,5 +1,5 @@ --- ASDL's 5 builtin types are: --- identifier, int, string, object, constant +-- ASDL's 4 builtin types are: +-- identifier, int, string, constant module Python { diff --git a/Parser/asdl.py b/Parser/asdl.py index 5416377100c64a..7f509488b96ed3 100644 --- a/Parser/asdl.py +++ b/Parser/asdl.py @@ -33,8 +33,7 @@ # See the EBNF at the top of the file to understand the logical connection # between the various node types. 
-builtin_types = {'identifier', 'string', 'bytes', 'int', 'object', 'singleton', - 'constant'} +builtin_types = {'identifier', 'string', 'int', 'constant'} class AST: def __repr__(self): diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index c98f949042f306..59bf03ef8df3d3 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -323,7 +323,7 @@ def emit(s, depth=0, reflow=True): if not opt and argtype != "int": emit("if (!%s) {" % argname, 1) emit("PyErr_SetString(PyExc_ValueError,", 2) - msg = "field %s is required for %s" % (argname, name) + msg = "field '%s' is required for %s" % (argname, name) emit(' "%s");' % msg, 2, reflow=False) emit('return NULL;', 2) @@ -853,11 +853,9 @@ def visitModule(self, mod): Py_INCREF((PyObject*)o); return (PyObject*)o; } -#define ast2obj_singleton ast2obj_object #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object #define ast2obj_string ast2obj_object -#define ast2obj_bytes ast2obj_object static PyObject* ast2obj_int(long b) { @@ -1147,12 +1145,8 @@ def simpleSum(self, sum, name): self.emit("case %s:" % t.name, 2) self.emit("Py_INCREF(astmodulestate_global->%s_singleton);" % t.name, 3) self.emit("return astmodulestate_global->%s_singleton;" % t.name, 3) - self.emit("default:", 2) - self.emit('/* should never happen, but just in case ... 
*/', 3) - code = "PyErr_Format(PyExc_SystemError, \"unknown %s found\");" % name - self.emit(code, 3, reflow=False) - self.emit("return NULL;", 3) self.emit("}", 1) + self.emit("Py_UNREACHABLE();", 1); self.emit("}", 0) def visitProduct(self, prod, name): diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 80f91646fd62e4..f34b1450c66ef1 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -1294,11 +1294,9 @@ static PyObject* ast2obj_object(void *o) Py_INCREF((PyObject*)o); return (PyObject*)o; } -#define ast2obj_singleton ast2obj_object #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object #define ast2obj_string ast2obj_object -#define ast2obj_bytes ast2obj_object static PyObject* ast2obj_int(long b) { @@ -2077,7 +2075,7 @@ Expression(expr_ty body, PyArena *arena) mod_ty p; if (!body) { PyErr_SetString(PyExc_ValueError, - "field body is required for Expression"); + "field 'body' is required for Expression"); return NULL; } p = (mod_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2094,7 +2092,7 @@ FunctionType(asdl_seq * argtypes, expr_ty returns, PyArena *arena) mod_ty p; if (!returns) { PyErr_SetString(PyExc_ValueError, - "field returns is required for FunctionType"); + "field 'returns' is required for FunctionType"); return NULL; } p = (mod_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2114,12 +2112,12 @@ FunctionDef(identifier name, arguments_ty args, asdl_seq * body, asdl_seq * stmt_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for FunctionDef"); + "field 'name' is required for FunctionDef"); return NULL; } if (!args) { PyErr_SetString(PyExc_ValueError, - "field args is required for FunctionDef"); + "field 'args' is required for FunctionDef"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2148,12 +2146,12 @@ AsyncFunctionDef(identifier name, arguments_ty args, asdl_seq * body, asdl_seq stmt_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for 
AsyncFunctionDef"); + "field 'name' is required for AsyncFunctionDef"); return NULL; } if (!args) { PyErr_SetString(PyExc_ValueError, - "field args is required for AsyncFunctionDef"); + "field 'args' is required for AsyncFunctionDef"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2181,7 +2179,7 @@ ClassDef(identifier name, asdl_seq * bases, asdl_seq * keywords, asdl_seq * stmt_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for ClassDef"); + "field 'name' is required for ClassDef"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2241,7 +2239,7 @@ Assign(asdl_seq * targets, expr_ty value, string type_comment, int lineno, int stmt_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Assign"); + "field 'value' is required for Assign"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2265,17 +2263,17 @@ AugAssign(expr_ty target, operator_ty op, expr_ty value, int lineno, int stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for AugAssign"); + "field 'target' is required for AugAssign"); return NULL; } if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for AugAssign"); + "field 'op' is required for AugAssign"); return NULL; } if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for AugAssign"); + "field 'value' is required for AugAssign"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2300,12 +2298,12 @@ AnnAssign(expr_ty target, expr_ty annotation, expr_ty value, int simple, int stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for AnnAssign"); + "field 'target' is required for AnnAssign"); return NULL; } if (!annotation) { PyErr_SetString(PyExc_ValueError, - "field annotation is required for AnnAssign"); + "field 'annotation' is required for AnnAssign"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, 
sizeof(*p)); @@ -2331,12 +2329,12 @@ For(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, string stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for For"); + "field 'target' is required for For"); return NULL; } if (!iter) { PyErr_SetString(PyExc_ValueError, - "field iter is required for For"); + "field 'iter' is required for For"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2363,12 +2361,12 @@ AsyncFor(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for AsyncFor"); + "field 'target' is required for AsyncFor"); return NULL; } if (!iter) { PyErr_SetString(PyExc_ValueError, - "field iter is required for AsyncFor"); + "field 'iter' is required for AsyncFor"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2394,7 +2392,7 @@ While(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno, int stmt_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for While"); + "field 'test' is required for While"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2418,7 +2416,7 @@ If(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno, int stmt_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for If"); + "field 'test' is required for If"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2519,7 +2517,7 @@ Assert(expr_ty test, expr_ty msg, int lineno, int col_offset, int end_lineno, stmt_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for Assert"); + "field 'test' is required for Assert"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2612,7 +2610,7 @@ Expr(expr_ty value, int lineno, int col_offset, int end_lineno, int stmt_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Expr"); + "field 
'value' is required for Expr"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2682,7 +2680,7 @@ BoolOp(boolop_ty op, asdl_seq * values, int lineno, int col_offset, int expr_ty p; if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for BoolOp"); + "field 'op' is required for BoolOp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2705,12 +2703,12 @@ NamedExpr(expr_ty target, expr_ty value, int lineno, int col_offset, int expr_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for NamedExpr"); + "field 'target' is required for NamedExpr"); return NULL; } if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for NamedExpr"); + "field 'value' is required for NamedExpr"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2733,17 +2731,17 @@ BinOp(expr_ty left, operator_ty op, expr_ty right, int lineno, int col_offset, expr_ty p; if (!left) { PyErr_SetString(PyExc_ValueError, - "field left is required for BinOp"); + "field 'left' is required for BinOp"); return NULL; } if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for BinOp"); + "field 'op' is required for BinOp"); return NULL; } if (!right) { PyErr_SetString(PyExc_ValueError, - "field right is required for BinOp"); + "field 'right' is required for BinOp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2767,12 +2765,12 @@ UnaryOp(unaryop_ty op, expr_ty operand, int lineno, int col_offset, int expr_ty p; if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for UnaryOp"); + "field 'op' is required for UnaryOp"); return NULL; } if (!operand) { PyErr_SetString(PyExc_ValueError, - "field operand is required for UnaryOp"); + "field 'operand' is required for UnaryOp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2795,12 +2793,12 @@ Lambda(arguments_ty args, expr_ty body, int lineno, int col_offset, int expr_ty p; if 
(!args) { PyErr_SetString(PyExc_ValueError, - "field args is required for Lambda"); + "field 'args' is required for Lambda"); return NULL; } if (!body) { PyErr_SetString(PyExc_ValueError, - "field body is required for Lambda"); + "field 'body' is required for Lambda"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2823,17 +2821,17 @@ IfExp(expr_ty test, expr_ty body, expr_ty orelse, int lineno, int col_offset, expr_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for IfExp"); + "field 'test' is required for IfExp"); return NULL; } if (!body) { PyErr_SetString(PyExc_ValueError, - "field body is required for IfExp"); + "field 'body' is required for IfExp"); return NULL; } if (!orelse) { PyErr_SetString(PyExc_ValueError, - "field orelse is required for IfExp"); + "field 'orelse' is required for IfExp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2892,7 +2890,7 @@ ListComp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, int expr_ty p; if (!elt) { PyErr_SetString(PyExc_ValueError, - "field elt is required for ListComp"); + "field 'elt' is required for ListComp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2915,7 +2913,7 @@ SetComp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, int expr_ty p; if (!elt) { PyErr_SetString(PyExc_ValueError, - "field elt is required for SetComp"); + "field 'elt' is required for SetComp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2938,12 +2936,12 @@ DictComp(expr_ty key, expr_ty value, asdl_seq * generators, int lineno, int expr_ty p; if (!key) { PyErr_SetString(PyExc_ValueError, - "field key is required for DictComp"); + "field 'key' is required for DictComp"); return NULL; } if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for DictComp"); + "field 'value' is required for DictComp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2967,7 
+2965,7 @@ GeneratorExp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, expr_ty p; if (!elt) { PyErr_SetString(PyExc_ValueError, - "field elt is required for GeneratorExp"); + "field 'elt' is required for GeneratorExp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2990,7 +2988,7 @@ Await(expr_ty value, int lineno, int col_offset, int end_lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Await"); + "field 'value' is required for Await"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3029,7 +3027,7 @@ YieldFrom(expr_ty value, int lineno, int col_offset, int end_lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for YieldFrom"); + "field 'value' is required for YieldFrom"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3051,7 +3049,7 @@ Compare(expr_ty left, asdl_int_seq * ops, asdl_seq * comparators, int lineno, expr_ty p; if (!left) { PyErr_SetString(PyExc_ValueError, - "field left is required for Compare"); + "field 'left' is required for Compare"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3075,7 +3073,7 @@ Call(expr_ty func, asdl_seq * args, asdl_seq * keywords, int lineno, int expr_ty p; if (!func) { PyErr_SetString(PyExc_ValueError, - "field func is required for Call"); + "field 'func' is required for Call"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3100,7 +3098,7 @@ FormattedValue(expr_ty value, int conversion, expr_ty format_spec, int lineno, expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for FormattedValue"); + "field 'value' is required for FormattedValue"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3141,7 +3139,7 @@ Constant(constant value, string kind, int lineno, int col_offset, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is 
required for Constant"); + "field 'value' is required for Constant"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3164,17 +3162,17 @@ Attribute(expr_ty value, identifier attr, expr_context_ty ctx, int lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Attribute"); + "field 'value' is required for Attribute"); return NULL; } if (!attr) { PyErr_SetString(PyExc_ValueError, - "field attr is required for Attribute"); + "field 'attr' is required for Attribute"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Attribute"); + "field 'ctx' is required for Attribute"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3198,17 +3196,17 @@ Subscript(expr_ty value, expr_ty slice, expr_context_ty ctx, int lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Subscript"); + "field 'value' is required for Subscript"); return NULL; } if (!slice) { PyErr_SetString(PyExc_ValueError, - "field slice is required for Subscript"); + "field 'slice' is required for Subscript"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Subscript"); + "field 'ctx' is required for Subscript"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3232,12 +3230,12 @@ Starred(expr_ty value, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Starred"); + "field 'value' is required for Starred"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Starred"); + "field 'ctx' is required for Starred"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3260,12 +3258,12 @@ Name(identifier id, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!id) { PyErr_SetString(PyExc_ValueError, - "field id is required for Name"); + 
"field 'id' is required for Name"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Name"); + "field 'ctx' is required for Name"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3288,7 +3286,7 @@ List(asdl_seq * elts, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for List"); + "field 'ctx' is required for List"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3311,7 +3309,7 @@ Tuple(asdl_seq * elts, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Tuple"); + "field 'ctx' is required for Tuple"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3353,12 +3351,12 @@ comprehension(expr_ty target, expr_ty iter, asdl_seq * ifs, int is_async, comprehension_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for comprehension"); + "field 'target' is required for comprehension"); return NULL; } if (!iter) { PyErr_SetString(PyExc_ValueError, - "field iter is required for comprehension"); + "field 'iter' is required for comprehension"); return NULL; } p = (comprehension_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3416,7 +3414,7 @@ arg(identifier arg, expr_ty annotation, string type_comment, int lineno, int arg_ty p; if (!arg) { PyErr_SetString(PyExc_ValueError, - "field arg is required for arg"); + "field 'arg' is required for arg"); return NULL; } p = (arg_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3439,7 +3437,7 @@ keyword(identifier arg, expr_ty value, int lineno, int col_offset, int keyword_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for keyword"); + "field 'value' is required for keyword"); return NULL; } p = (keyword_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3460,7 +3458,7 @@ alias(identifier name, identifier asname, PyArena *arena) 
alias_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for alias"); + "field 'name' is required for alias"); return NULL; } p = (alias_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3477,7 +3475,7 @@ withitem(expr_ty context_expr, expr_ty optional_vars, PyArena *arena) withitem_ty p; if (!context_expr) { PyErr_SetString(PyExc_ValueError, - "field context_expr is required for withitem"); + "field 'context_expr' is required for withitem"); return NULL; } p = (withitem_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3494,7 +3492,7 @@ TypeIgnore(int lineno, string tag, PyArena *arena) type_ignore_ty p; if (!tag) { PyErr_SetString(PyExc_ValueError, - "field tag is required for TypeIgnore"); + "field 'tag' is required for TypeIgnore"); return NULL; } p = (type_ignore_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -4602,11 +4600,8 @@ PyObject* ast2obj_expr_context(expr_context_ty o) case Del: Py_INCREF(astmodulestate_global->Del_singleton); return astmodulestate_global->Del_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown expr_context found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_boolop(boolop_ty o) { @@ -4617,11 +4612,8 @@ PyObject* ast2obj_boolop(boolop_ty o) case Or: Py_INCREF(astmodulestate_global->Or_singleton); return astmodulestate_global->Or_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown boolop found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_operator(operator_ty o) { @@ -4665,11 +4657,8 @@ PyObject* ast2obj_operator(operator_ty o) case FloorDiv: Py_INCREF(astmodulestate_global->FloorDiv_singleton); return astmodulestate_global->FloorDiv_singleton; - default: - /* should never happen, but just in case ... 
*/ - PyErr_Format(PyExc_SystemError, "unknown operator found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_unaryop(unaryop_ty o) { @@ -4686,11 +4675,8 @@ PyObject* ast2obj_unaryop(unaryop_ty o) case USub: Py_INCREF(astmodulestate_global->USub_singleton); return astmodulestate_global->USub_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown unaryop found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_cmpop(cmpop_ty o) { @@ -4725,11 +4711,8 @@ PyObject* ast2obj_cmpop(cmpop_ty o) case NotIn: Py_INCREF(astmodulestate_global->NotIn_singleton); return astmodulestate_global->NotIn_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown cmpop found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_comprehension(void* _o) From 89fc4a34cf7a01df9dd269d32d3706c68a72d130 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 6 May 2020 18:23:58 +0200 Subject: [PATCH 025/115] bpo-40521: Disable method cache in subinterpreters (GH-19960) When Python is built with experimental isolated interpreters, disable the type method cache. Temporary workaround until the cache is made per-interpreter. 
--- Objects/typeobject.c | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 1565b90898605e..0d5600b4ce4faf 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -19,6 +19,12 @@ class object "PyObject *" "&PyBaseObject_Type" #include "clinic/typeobject.c.h" +/* bpo-40521: Type method cache is shared by all subinterpreters */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define MCACHE +#endif + +#ifdef MCACHE /* Support type attribute cache */ /* The cache can keep references to the names alive for longer than @@ -47,6 +53,7 @@ struct method_cache_entry { static struct method_cache_entry method_cache[1 << MCACHE_SIZE_EXP]; static unsigned int next_version_tag = 0; +#endif #define MCACHE_STATS 0 @@ -216,6 +223,7 @@ _PyType_GetTextSignatureFromInternalDoc(const char *name, const char *internal_d unsigned int PyType_ClearCache(void) { +#ifdef MCACHE Py_ssize_t i; unsigned int cur_version_tag = next_version_tag - 1; @@ -240,6 +248,9 @@ PyType_ClearCache(void) /* mark all version tags as invalid */ PyType_Modified(&PyBaseObject_Type); return cur_version_tag; +#else + return 0; +#endif } void @@ -350,6 +361,7 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) { Py_TPFLAGS_VALID_VERSION_TAG); } +#ifdef MCACHE static int assign_version_tag(PyTypeObject *type) { @@ -396,6 +408,7 @@ assign_version_tag(PyTypeObject *type) type->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; return 1; } +#endif static PyMemberDef type_members[] = { @@ -3232,12 +3245,12 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) { PyObject *res; int error; - unsigned int h; +#ifdef MCACHE if (MCACHE_CACHEABLE_NAME(name) && _PyType_HasFeature(type, Py_TPFLAGS_VALID_VERSION_TAG)) { /* fast path */ - h = MCACHE_HASH_METHOD(type, name); + unsigned int h = MCACHE_HASH_METHOD(type, name); if (method_cache[h].version == type->tp_version_tag && method_cache[h].name == name) { #if MCACHE_STATS @@ -3246,6 
+3259,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) return method_cache[h].value; } } +#endif /* We may end up clearing live exceptions below, so make sure it's ours. */ assert(!PyErr_Occurred()); @@ -3267,8 +3281,9 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) return NULL; } +#ifdef MCACHE if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(type)) { - h = MCACHE_HASH_METHOD(type, name); + unsigned int h = MCACHE_HASH_METHOD(type, name); method_cache[h].version = type->tp_version_tag; method_cache[h].value = res; /* borrowed */ Py_INCREF(name); @@ -3281,6 +3296,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) #endif Py_SETREF(method_cache[h].name, name); } +#endif return res; } From d8135e913ab7c694db247c86d0a84c450c32d86e Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 6 May 2020 18:25:06 +0200 Subject: [PATCH 026/115] bpo-40533: Disable GC in subinterpreters (GH-19961) When Python is built with experimental isolated interpreters, a garbage collection now does nothing in an isolated interpreter. Temporary workaround until subinterpreters stop sharing Python objects. --- Modules/gcmodule.c | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 56dcb101e0005e..a44752b1cc4da7 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1181,6 +1181,14 @@ collect(PyThreadState *tstate, int generation, _PyTime_t t1 = 0; /* initialize to prevent a compiler warning */ GCState *gcstate = &tstate->interp->gc; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + if (tstate->interp->config._isolated_interpreter) { + // bpo-40533: The garbage collector must not be run on parallel on + // Python objects shared by multiple interpreters. 
+ return 0; + } +#endif + if (gcstate->debug & DEBUG_STATS) { PySys_WriteStderr("gc: collecting generation %d...\n", generation); show_stats_each_generations(gcstate); From b7aa23d29fa48238dab3692d02e1f0a7e8a5af9c Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 6 May 2020 19:05:27 +0200 Subject: [PATCH 027/115] bpo-40521: Disable list free list in subinterpreters (GH-19959) When Python is built with experimental isolated interpreters, disable the list free list. Temporary workaround until this cache is made per-interpreter. --- Objects/listobject.c | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/Objects/listobject.c b/Objects/listobject.c index 904bea317c9da8..37fadca129ac02 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -98,8 +98,15 @@ list_preallocate_exact(PyListObject *self, Py_ssize_t size) /* Empty list reuse scheme to save calls to malloc and free */ #ifndef PyList_MAXFREELIST -#define PyList_MAXFREELIST 80 +# define PyList_MAXFREELIST 80 #endif + +/* bpo-40521: list free lists are shared by all interpreters. 
*/ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyList_MAXFREELIST +# define PyList_MAXFREELIST 0 +#endif + static PyListObject *free_list[PyList_MAXFREELIST]; static int numfree = 0; From 999ec9ab6af536cc2666a0847ec02331aaf00416 Mon Sep 17 00:00:00 2001 From: Lysandros Nikolaou Date: Wed, 6 May 2020 21:11:04 +0300 Subject: [PATCH 028/115] bpo-40334: Add type to the assignment rule in the grammar file (GH-19963) --- Grammar/python.gram | 2 +- Parser/pegen/parse.c | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Grammar/python.gram b/Grammar/python.gram index 0ce6ab4b4ba908..3f16768198f9da 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -82,7 +82,7 @@ compound_stmt[stmt_ty]: | &'while' while_stmt # NOTE: annotated_rhs may start with 'yield'; yield_expr must start with 'yield' -assignment: +assignment[stmt_ty]: | a=NAME ':' b=expression c=['=' d=annotated_rhs { d }] { CHECK_VERSION( 6, diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 55605d5770f1e6..3b518ee263777b 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -378,7 +378,7 @@ static asdl_seq* statement_newline_rule(Parser *p); static asdl_seq* simple_stmt_rule(Parser *p); static stmt_ty small_stmt_rule(Parser *p); static stmt_ty compound_stmt_rule(Parser *p); -static void *assignment_rule(Parser *p); +static stmt_ty assignment_rule(Parser *p); static AugOperator* augassign_rule(Parser *p); static stmt_ty global_stmt_rule(Parser *p); static stmt_ty nonlocal_stmt_rule(Parser *p); @@ -1256,7 +1256,7 @@ small_stmt_rule(Parser *p) int start_col_offset = p->tokens[mark]->col_offset; UNUSED(start_col_offset); // Only used by EXTRA macro { // assignment - void *assignment_var; + stmt_ty assignment_var; if ( (assignment_var = assignment_rule(p)) ) @@ -1586,13 +1586,13 @@ compound_stmt_rule(Parser *p) // | ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? 
// | target augassign (yield_expr | star_expressions) // | invalid_assignment -static void * +static stmt_ty assignment_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; + stmt_ty res = NULL; int mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; From 441416c9a06f11f28e17d56c915ea6116c0c9ea7 Mon Sep 17 00:00:00 2001 From: Naglis Date: Wed, 6 May 2020 19:51:43 +0000 Subject: [PATCH 029/115] Fix typo in sqlite3 documentation (GH-19965) *first* is repeated twice. --- Doc/library/sqlite3.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index 314d3a58e2759a..ccb82278bdaa13 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -928,7 +928,7 @@ a class like this:: self.x, self.y = x, y Now you want to store the point in a single SQLite column. First you'll have to -choose one of the supported types first to be used for representing the point. +choose one of the supported types to be used for representing the point. Let's just use str and separate the coordinates using a semicolon. Then you need to give your class a method ``__conform__(self, protocol)`` which must return the converted value. The parameter *protocol* will be :class:`PrepareProtocol`. 
From 99db2a1db7a9b468a0ce8377d579f78fa03a2a34 Mon Sep 17 00:00:00 2001 From: Pablo Galindo Date: Wed, 6 May 2020 22:54:34 +0100 Subject: [PATCH 030/115] bpo-40334: Allow trailing comma in parenthesised context managers (GH-19964) --- Grammar/python.gram | 4 +-- Lib/test/test_grammar.py | 66 +++++++++++++++++++++++++++++++++++++++- Parser/pegen/parse.c | 16 +++++++--- 3 files changed, 79 insertions(+), 7 deletions(-) diff --git a/Grammar/python.gram b/Grammar/python.gram index 3f16768198f9da..3d8a39b1d59066 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -170,11 +170,11 @@ for_stmt[stmt_ty]: CHECK_VERSION(5, "Async for loops are", _Py_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) } with_stmt[stmt_ty]: - | 'with' '(' a=','.with_item+ ')' ':' b=block { + | 'with' '(' a=','.with_item+ ','? ')' ':' b=block { _Py_With(a, b, NULL, EXTRA) } | 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { _Py_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | ASYNC 'with' '(' a=','.with_item+ ')' ':' b=block { + | ASYNC 'with' '(' a=','.with_item+ ','? ')' ':' b=block { CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NULL, EXTRA)) } | ASYNC 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) } diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index 922a5166ec12f7..c24d3529490be0 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -1,7 +1,7 @@ # Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. 
-from test.support import check_syntax_error, check_syntax_warning +from test.support import check_syntax_error, check_syntax_warning, use_old_parser import inspect import unittest import sys @@ -1694,6 +1694,70 @@ def __exit__(self, *args): with manager() as x, manager(): pass + if not use_old_parser(): + test_cases = [ + """if 1: + with ( + manager() + ): + pass + """, + """if 1: + with ( + manager() as x + ): + pass + """, + """if 1: + with ( + manager() as (x, y), + manager() as z, + ): + pass + """, + """if 1: + with ( + manager(), + manager() + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() as y + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() as y, + manager() as z, + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() as y, + manager(), + ): + pass + """, + ] + for case in test_cases: + with self.subTest(case=case): + compile(case, "", "exec") + + def test_if_else_expr(self): # Test ifelse expressions in various cases def _checkeval(msg, ret): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 3b518ee263777b..d86390839d528e 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -3031,9 +3031,9 @@ for_stmt_rule(Parser *p) } // with_stmt: -// | 'with' '(' ','.with_item+ ')' ':' block +// | 'with' '(' ','.with_item+ ','? ')' ':' block // | 'with' ','.with_item+ ':' TYPE_COMMENT? block -// | ASYNC 'with' '(' ','.with_item+ ')' ':' block +// | ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block // | ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block static stmt_ty with_stmt_rule(Parser *p) @@ -3051,13 +3051,15 @@ with_stmt_rule(Parser *p) UNUSED(start_lineno); // Only used by EXTRA macro int start_col_offset = p->tokens[mark]->col_offset; UNUSED(start_col_offset); // Only used by EXTRA macro - { // 'with' '(' ','.with_item+ ')' ':' block + { // 'with' '(' ','.with_item+ ','? 
')' ':' block asdl_seq * a; asdl_seq* b; Token * keyword; Token * literal; Token * literal_1; Token * literal_2; + void *opt_var; + UNUSED(opt_var); // Silence compiler warnings if ( (keyword = _PyPegen_expect_token(p, 519)) && @@ -3065,6 +3067,8 @@ with_stmt_rule(Parser *p) && (a = _gather_38_rule(p)) && + (opt_var = _PyPegen_expect_token(p, 12), 1) + && (literal_1 = _PyPegen_expect_token(p, 8)) && (literal_2 = _PyPegen_expect_token(p, 11)) @@ -3124,7 +3128,7 @@ with_stmt_rule(Parser *p) } p->mark = mark; } - { // ASYNC 'with' '(' ','.with_item+ ')' ':' block + { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block asdl_seq * a; Token * async_var; asdl_seq* b; @@ -3132,6 +3136,8 @@ with_stmt_rule(Parser *p) Token * literal; Token * literal_1; Token * literal_2; + void *opt_var; + UNUSED(opt_var); // Silence compiler warnings if ( (async_var = _PyPegen_expect_token(p, ASYNC)) && @@ -3141,6 +3147,8 @@ with_stmt_rule(Parser *p) && (a = _gather_42_rule(p)) && + (opt_var = _PyPegen_expect_token(p, 12), 1) + && (literal_1 = _PyPegen_expect_token(p, 8)) && (literal_2 = _PyPegen_expect_token(p, 11)) From 470aac4d8e76556bd8f820f3f3928dca2b4d2849 Mon Sep 17 00:00:00 2001 From: Pablo Galindo Date: Wed, 6 May 2020 23:14:43 +0100 Subject: [PATCH 031/115] bpo-40334: Generate comments in the parser code to improve debugging (GH-19966) --- Parser/pegen/parse.c | 2148 +++++++++++----------- Tools/peg_generator/pegen/c_generator.py | 14 + 2 files changed, 1088 insertions(+), 1074 deletions(-) diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index d86390839d528e..b1da16640aa6e1 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -678,9 +678,9 @@ file_rule(Parser *p) void *a; Token * endmarker_var; if ( - (a = statements_rule(p), 1) + (a = statements_rule(p), 1) // statements? 
&& - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { res = _PyPegen_make_module ( p , a ); @@ -709,7 +709,7 @@ interactive_rule(Parser *p) { // statement_newline asdl_seq* a; if ( - (a = statement_newline_rule(p)) + (a = statement_newline_rule(p)) // statement_newline ) { res = Interactive ( a , p -> arena ); @@ -740,11 +740,11 @@ eval_rule(Parser *p) expr_ty a; Token * endmarker_var; if ( - (a = expressions_rule(p)) + (a = expressions_rule(p)) // expressions && - (_loop0_1_var = _loop0_1_rule(p)) + (_loop0_1_var = _loop0_1_rule(p)) // NEWLINE* && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { res = Expression ( a , p -> arena ); @@ -779,19 +779,19 @@ func_type_rule(Parser *p) Token * literal_1; Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = type_expressions_rule(p), 1) + (a = type_expressions_rule(p), 1) // type_expressions? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 51)) + (literal_2 = _PyPegen_expect_token(p, 51)) // token='->' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (_loop0_2_var = _loop0_2_rule(p)) + (_loop0_2_var = _loop0_2_rule(p)) // NEWLINE* && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { res = FunctionType ( a , b , p -> arena ); @@ -820,7 +820,7 @@ fstring_rule(Parser *p) { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -858,19 +858,19 @@ type_expressions_rule(Parser *p) Token * literal_2; Token * literal_3; if ( - (a = _gather_3_rule(p)) + (a = _gather_3_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) + (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (literal_2 = _PyPegen_expect_token(p, 12)) + (literal_2 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_3 = _PyPegen_expect_token(p, 35)) + (literal_3 = _PyPegen_expect_token(p, 35)) // token='**' && - (c = expression_rule(p)) + (c = expression_rule(p)) // expression ) { res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); @@ -888,13 +888,13 @@ type_expressions_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _gather_5_rule(p)) + (a = _gather_5_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) + (literal_1 = _PyPegen_expect_token(p, 
16)) // token='*' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { res = _PyPegen_seq_append_to_end ( p , a , b ); @@ -912,13 +912,13 @@ type_expressions_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _gather_7_rule(p)) + (a = _gather_7_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 35)) + (literal_1 = _PyPegen_expect_token(p, 35)) // token='**' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { res = _PyPegen_seq_append_to_end ( p , a , b ); @@ -937,15 +937,15 @@ type_expressions_rule(Parser *p) Token * literal_1; Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_2 = _PyPegen_expect_token(p, 35)) + (literal_2 = _PyPegen_expect_token(p, 35)) // token='**' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); @@ -961,9 +961,9 @@ type_expressions_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = _PyPegen_singleton_seq ( p , a ); @@ -979,9 +979,9 @@ type_expressions_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = _PyPegen_singleton_seq ( p , a ); @@ -996,7 +996,7 @@ type_expressions_rule(Parser *p) { // ','.expression+ asdl_seq * _gather_9_var; if ( - 
(_gather_9_var = _gather_9_rule(p)) + (_gather_9_var = _gather_9_rule(p)) // ','.expression+ ) { res = _gather_9_var; @@ -1021,7 +1021,7 @@ statements_rule(Parser *p) { // statement+ asdl_seq * a; if ( - (a = _loop1_11_rule(p)) + (a = _loop1_11_rule(p)) // statement+ ) { res = _PyPegen_seq_flatten ( p , a ); @@ -1050,7 +1050,7 @@ statement_rule(Parser *p) { // compound_stmt stmt_ty a; if ( - (a = compound_stmt_rule(p)) + (a = compound_stmt_rule(p)) // compound_stmt ) { res = _PyPegen_singleton_seq ( p , a ); @@ -1065,7 +1065,7 @@ statement_rule(Parser *p) { // simple_stmt asdl_seq* simple_stmt_var; if ( - (simple_stmt_var = simple_stmt_rule(p)) + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { res = simple_stmt_var; @@ -1099,9 +1099,9 @@ statement_newline_rule(Parser *p) stmt_ty a; Token * newline_var; if ( - (a = compound_stmt_rule(p)) + (a = compound_stmt_rule(p)) // compound_stmt && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = _PyPegen_singleton_seq ( p , a ); @@ -1116,7 +1116,7 @@ statement_newline_rule(Parser *p) { // simple_stmt asdl_seq* simple_stmt_var; if ( - (simple_stmt_var = simple_stmt_rule(p)) + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { res = simple_stmt_var; @@ -1127,7 +1127,7 @@ statement_newline_rule(Parser *p) { // NEWLINE Token * newline_var; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1150,7 +1150,7 @@ statement_newline_rule(Parser *p) { // $ Token * endmarker_var; if ( - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { res = _PyPegen_interactive_exit ( p ); @@ -1180,11 +1180,11 @@ simple_stmt_rule(Parser *p) stmt_ty a; Token * newline_var; if ( - (a = small_stmt_rule(p)) + (a = small_stmt_rule(p)) 
// small_stmt && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 13) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 13) // token=';' && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = _PyPegen_singleton_seq ( p , a ); @@ -1202,11 +1202,11 @@ simple_stmt_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_12_rule(p)) + (a = _gather_12_rule(p)) // ';'.small_stmt+ && - (opt_var = _PyPegen_expect_token(p, 13), 1) + (opt_var = _PyPegen_expect_token(p, 13), 1) // ';'? && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = a; @@ -1258,7 +1258,7 @@ small_stmt_rule(Parser *p) { // assignment stmt_ty assignment_var; if ( - (assignment_var = assignment_rule(p)) + (assignment_var = assignment_rule(p)) // assignment ) { res = assignment_var; @@ -1269,7 +1269,7 @@ small_stmt_rule(Parser *p) { // star_expressions expr_ty e; if ( - (e = star_expressions_rule(p)) + (e = star_expressions_rule(p)) // star_expressions ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1292,9 +1292,9 @@ small_stmt_rule(Parser *p) { // &'return' return_stmt stmt_ty return_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) // token='return' && - (return_stmt_var = return_stmt_rule(p)) + (return_stmt_var = return_stmt_rule(p)) // return_stmt ) { res = return_stmt_var; @@ -1307,7 +1307,7 @@ small_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_14_rule, p) && - (import_stmt_var = import_stmt_rule(p)) + (import_stmt_var = import_stmt_rule(p)) // import_stmt ) { res = import_stmt_var; @@ -1318,9 +1318,9 @@ small_stmt_rule(Parser *p) { // &'raise' raise_stmt stmt_ty raise_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) + 
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) // token='raise' && - (raise_stmt_var = raise_stmt_rule(p)) + (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt ) { res = raise_stmt_var; @@ -1331,7 +1331,7 @@ small_stmt_rule(Parser *p) { // 'pass' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 502)) + (keyword = _PyPegen_expect_token(p, 502)) // token='pass' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1354,9 +1354,9 @@ small_stmt_rule(Parser *p) { // &'del' del_stmt stmt_ty del_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) // token='del' && - (del_stmt_var = del_stmt_rule(p)) + (del_stmt_var = del_stmt_rule(p)) // del_stmt ) { res = del_stmt_var; @@ -1367,9 +1367,9 @@ small_stmt_rule(Parser *p) { // &'yield' yield_stmt stmt_ty yield_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) // token='yield' && - (yield_stmt_var = yield_stmt_rule(p)) + (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt ) { res = yield_stmt_var; @@ -1380,9 +1380,9 @@ small_stmt_rule(Parser *p) { // &'assert' assert_stmt stmt_ty assert_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) // token='assert' && - (assert_stmt_var = assert_stmt_rule(p)) + (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt ) { res = assert_stmt_var; @@ -1393,7 +1393,7 @@ small_stmt_rule(Parser *p) { // 'break' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 506)) + (keyword = _PyPegen_expect_token(p, 506)) // token='break' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1416,7 +1416,7 @@ small_stmt_rule(Parser *p) { // 'continue' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 507)) + (keyword = _PyPegen_expect_token(p, 507)) // token='continue' 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1439,9 +1439,9 @@ small_stmt_rule(Parser *p) { // &'global' global_stmt stmt_ty global_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) // token='global' && - (global_stmt_var = global_stmt_rule(p)) + (global_stmt_var = global_stmt_rule(p)) // global_stmt ) { res = global_stmt_var; @@ -1452,9 +1452,9 @@ small_stmt_rule(Parser *p) { // &'nonlocal' nonlocal_stmt stmt_ty nonlocal_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) // token='nonlocal' && - (nonlocal_stmt_var = nonlocal_stmt_rule(p)) + (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt ) { res = nonlocal_stmt_var; @@ -1489,7 +1489,7 @@ compound_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_15_rule, p) && - (function_def_var = function_def_rule(p)) + (function_def_var = function_def_rule(p)) // function_def ) { res = function_def_var; @@ -1500,9 +1500,9 @@ compound_stmt_rule(Parser *p) { // &'if' if_stmt stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) // token='if' && - (if_stmt_var = if_stmt_rule(p)) + (if_stmt_var = if_stmt_rule(p)) // if_stmt ) { res = if_stmt_var; @@ -1515,7 +1515,7 @@ compound_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_16_rule, p) && - (class_def_var = class_def_rule(p)) + (class_def_var = class_def_rule(p)) // class_def ) { res = class_def_var; @@ -1528,7 +1528,7 @@ compound_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_17_rule, p) && - (with_stmt_var = with_stmt_rule(p)) + (with_stmt_var = with_stmt_rule(p)) // with_stmt ) { res = with_stmt_var; @@ -1541,7 +1541,7 @@ compound_stmt_rule(Parser *p) if ( _PyPegen_lookahead(1, _tmp_18_rule, p) && - (for_stmt_var = for_stmt_rule(p)) + (for_stmt_var = 
for_stmt_rule(p)) // for_stmt ) { res = for_stmt_var; @@ -1552,9 +1552,9 @@ compound_stmt_rule(Parser *p) { // &'try' try_stmt stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) // token='try' && - (try_stmt_var = try_stmt_rule(p)) + (try_stmt_var = try_stmt_rule(p)) // try_stmt ) { res = try_stmt_var; @@ -1565,9 +1565,9 @@ compound_stmt_rule(Parser *p) { // &'while' while_stmt stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) // token='while' && - (while_stmt_var = while_stmt_rule(p)) + (while_stmt_var = while_stmt_rule(p)) // while_stmt ) { res = while_stmt_var; @@ -1608,13 +1608,13 @@ assignment_rule(Parser *p) void *c; Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (c = _tmp_19_rule(p), 1) + (c = _tmp_19_rule(p), 1) // ['=' annotated_rhs] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1640,13 +1640,13 @@ assignment_rule(Parser *p) void *c; Token * literal; if ( - (a = _tmp_20_rule(p)) + (a = _tmp_20_rule(p)) // '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (c = _tmp_21_rule(p), 1) + (c = _tmp_21_rule(p), 1) // ['=' annotated_rhs] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1671,11 +1671,11 @@ assignment_rule(Parser *p) void *b; void *tc; if ( - (a = _loop1_22_rule(p)) + (a = _loop1_22_rule(p)) // ((star_targets '='))+ && - (b = _tmp_23_rule(p)) + (b = _tmp_23_rule(p)) // yield_expr | 
star_expressions && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1700,11 +1700,11 @@ assignment_rule(Parser *p) AugOperator* b; void *c; if ( - (a = target_rule(p)) + (a = target_rule(p)) // target && - (b = augassign_rule(p)) + (b = augassign_rule(p)) // augassign && - (c = _tmp_24_rule(p)) + (c = _tmp_24_rule(p)) // yield_expr | star_expressions ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -1727,7 +1727,7 @@ assignment_rule(Parser *p) { // invalid_assignment void *invalid_assignment_var; if ( - (invalid_assignment_var = invalid_assignment_rule(p)) + (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment ) { res = invalid_assignment_var; @@ -1765,7 +1765,7 @@ augassign_rule(Parser *p) { // '+=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 36)) + (literal = _PyPegen_expect_token(p, 36)) // token='+=' ) { res = _PyPegen_augoperator ( p , Add ); @@ -1780,7 +1780,7 @@ augassign_rule(Parser *p) { // '-=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 37)) + (literal = _PyPegen_expect_token(p, 37)) // token='-=' ) { res = _PyPegen_augoperator ( p , Sub ); @@ -1795,7 +1795,7 @@ augassign_rule(Parser *p) { // '*=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 38)) + (literal = _PyPegen_expect_token(p, 38)) // token='*=' ) { res = _PyPegen_augoperator ( p , Mult ); @@ -1810,7 +1810,7 @@ augassign_rule(Parser *p) { // '@=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 50)) + (literal = _PyPegen_expect_token(p, 50)) // token='@=' ) { res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); @@ -1825,7 +1825,7 @@ augassign_rule(Parser *p) { // '/=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 39)) + (literal = _PyPegen_expect_token(p, 39)) // token='/=' ) { res = _PyPegen_augoperator ( p , Div ); 
@@ -1840,7 +1840,7 @@ augassign_rule(Parser *p) { // '%=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 40)) + (literal = _PyPegen_expect_token(p, 40)) // token='%=' ) { res = _PyPegen_augoperator ( p , Mod ); @@ -1855,7 +1855,7 @@ augassign_rule(Parser *p) { // '&=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 41)) + (literal = _PyPegen_expect_token(p, 41)) // token='&=' ) { res = _PyPegen_augoperator ( p , BitAnd ); @@ -1870,7 +1870,7 @@ augassign_rule(Parser *p) { // '|=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 42)) + (literal = _PyPegen_expect_token(p, 42)) // token='|=' ) { res = _PyPegen_augoperator ( p , BitOr ); @@ -1885,7 +1885,7 @@ augassign_rule(Parser *p) { // '^=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 43)) + (literal = _PyPegen_expect_token(p, 43)) // token='^=' ) { res = _PyPegen_augoperator ( p , BitXor ); @@ -1900,7 +1900,7 @@ augassign_rule(Parser *p) { // '<<=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 44)) + (literal = _PyPegen_expect_token(p, 44)) // token='<<=' ) { res = _PyPegen_augoperator ( p , LShift ); @@ -1915,7 +1915,7 @@ augassign_rule(Parser *p) { // '>>=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 45)) + (literal = _PyPegen_expect_token(p, 45)) // token='>>=' ) { res = _PyPegen_augoperator ( p , RShift ); @@ -1930,7 +1930,7 @@ augassign_rule(Parser *p) { // '**=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 46)) + (literal = _PyPegen_expect_token(p, 46)) // token='**=' ) { res = _PyPegen_augoperator ( p , Pow ); @@ -1945,7 +1945,7 @@ augassign_rule(Parser *p) { // '//=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 48)) + (literal = _PyPegen_expect_token(p, 48)) // token='//=' ) { res = _PyPegen_augoperator ( p , FloorDiv ); @@ -1983,9 +1983,9 @@ global_stmt_rule(Parser *p) asdl_seq * a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 508)) + (keyword = _PyPegen_expect_token(p, 508)) // 
token='global' && - (a = _gather_25_rule(p)) + (a = _gather_25_rule(p)) // ','.NAME+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2031,9 +2031,9 @@ nonlocal_stmt_rule(Parser *p) asdl_seq * a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 509)) + (keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal' && - (a = _gather_27_rule(p)) + (a = _gather_27_rule(p)) // ','.NAME+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2078,7 +2078,7 @@ yield_stmt_rule(Parser *p) { // yield_expr expr_ty y; if ( - (y = yield_expr_rule(p)) + (y = yield_expr_rule(p)) // yield_expr ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2125,11 +2125,11 @@ assert_stmt_rule(Parser *p) void *b; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 505)) + (keyword = _PyPegen_expect_token(p, 505)) // token='assert' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = _tmp_29_rule(p), 1) + (b = _tmp_29_rule(p), 1) // [',' expression] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2175,9 +2175,9 @@ del_stmt_rule(Parser *p) asdl_seq* a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 503)) + (keyword = _PyPegen_expect_token(p, 503)) // token='del' && - (a = del_targets_rule(p)) + (a = del_targets_rule(p)) // del_targets ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2214,7 +2214,7 @@ import_stmt_rule(Parser *p) { // import_name stmt_ty import_name_var; if ( - (import_name_var = import_name_rule(p)) + (import_name_var = import_name_rule(p)) // import_name ) { res = import_name_var; @@ -2225,7 +2225,7 @@ import_stmt_rule(Parser *p) { // import_from stmt_ty import_from_var; if ( - (import_from_var = import_from_rule(p)) + (import_from_var = import_from_rule(p)) // import_from ) { res = import_from_var; @@ -2259,9 +2259,9 @@ import_name_rule(Parser *p) asdl_seq* a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) + (keyword = 
_PyPegen_expect_token(p, 513)) // token='import' && - (a = dotted_as_names_rule(p)) + (a = dotted_as_names_rule(p)) // dotted_as_names ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2312,15 +2312,15 @@ import_from_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (a = _loop0_30_rule(p)) + (a = _loop0_30_rule(p)) // (('.' | '...'))* && - (b = dotted_name_rule(p)) + (b = dotted_name_rule(p)) // dotted_name && - (keyword_1 = _PyPegen_expect_token(p, 513)) + (keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && - (c = import_from_targets_rule(p)) + (c = import_from_targets_rule(p)) // import_from_targets ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2346,13 +2346,13 @@ import_from_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (a = _loop1_31_rule(p)) + (a = _loop1_31_rule(p)) // (('.' | '...'))+ && - (keyword_1 = _PyPegen_expect_token(p, 513)) + (keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && - (b = import_from_targets_rule(p)) + (b = import_from_targets_rule(p)) // import_from_targets ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2393,13 +2393,13 @@ import_from_targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = import_from_as_names_rule(p)) + (a = import_from_as_names_rule(p)) // import_from_as_names && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = a; @@ -2414,7 +2414,7 @@ import_from_targets_rule(Parser *p) { // import_from_as_names asdl_seq* import_from_as_names_var; if ( - (import_from_as_names_var = import_from_as_names_rule(p)) + (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names ) { res = import_from_as_names_var; @@ -2425,7 +2425,7 @@ import_from_targets_rule(Parser *p) { // '*' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' ) { res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); @@ -2454,7 +2454,7 @@ import_from_as_names_rule(Parser *p) { // ','.import_from_as_name+ asdl_seq * a; if ( - (a = _gather_32_rule(p)) + (a = _gather_32_rule(p)) // ','.import_from_as_name+ ) { res = a; @@ -2484,9 +2484,9 @@ import_from_as_name_rule(Parser *p) expr_ty a; void *b; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (b = _tmp_34_rule(p), 1) + (b = _tmp_34_rule(p), 1) // ['as' NAME] ) { res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); @@ -2515,7 +2515,7 @@ dotted_as_names_rule(Parser *p) { // ','.dotted_as_name+ asdl_seq * a; if ( - (a = _gather_35_rule(p)) + (a = _gather_35_rule(p)) // ','.dotted_as_name+ ) { res = a; @@ -2545,9 +2545,9 @@ dotted_as_name_rule(Parser *p) expr_ty a; void *b; if ( - (a = dotted_name_rule(p)) + (a = dotted_name_rule(p)) // dotted_name && - (b = _tmp_37_rule(p), 1) + (b = _tmp_37_rule(p), 1) // ['as' NAME] ) { res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . 
id : NULL , p -> arena ); @@ -2603,11 +2603,11 @@ dotted_name_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = dotted_name_rule(p)) + (a = dotted_name_rule(p)) // dotted_name && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_join_names_with_dot ( p , a , b ); @@ -2622,7 +2622,7 @@ dotted_name_raw(Parser *p) { // NAME expr_ty name_var; if ( - (name_var = _PyPegen_name_token(p)) + (name_var = _PyPegen_name_token(p)) // NAME ) { res = name_var; @@ -2661,15 +2661,15 @@ if_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = elif_stmt_rule(p)) + (c = elif_stmt_rule(p)) // elif_stmt ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2696,15 +2696,15 @@ if_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = else_block_rule(p), 1) + (c = else_block_rule(p), 1) // else_block? 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2755,15 +2755,15 @@ elif_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) + (keyword = _PyPegen_expect_token(p, 515)) // token='elif' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = elif_stmt_rule(p)) + (c = elif_stmt_rule(p)) // elif_stmt ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2790,15 +2790,15 @@ elif_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) + (keyword = _PyPegen_expect_token(p, 515)) // token='elif' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = else_block_rule(p), 1) + (c = else_block_rule(p), 1) // else_block? 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2837,11 +2837,11 @@ else_block_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 516)) + (keyword = _PyPegen_expect_token(p, 516)) // token='else' && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { res = b; @@ -2882,15 +2882,15 @@ while_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 512)) + (keyword = _PyPegen_expect_token(p, 512)) // token='while' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = else_block_rule(p), 1) + (c = else_block_rule(p), 1) // else_block? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2944,21 +2944,21 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (t = star_targets_rule(p)) + (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (ex = star_expressions_rule(p)) + (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (el = else_block_rule(p), 1) + (el = else_block_rule(p), 1) // else_block? 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -2989,23 +2989,23 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (t = star_targets_rule(p)) + (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (ex = star_expressions_rule(p)) + (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (el = else_block_rule(p), 1) + (el = else_block_rule(p), 1) // else_block? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3061,19 +3061,19 @@ with_stmt_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _gather_38_rule(p)) + (a = _gather_38_rule(p)) // ','.with_item+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 11)) + (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3100,15 +3100,15 @@ with_stmt_rule(Parser *p) Token * literal; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (a = _gather_40_rule(p)) + (a = _gather_40_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3139,21 +3139,21 @@ with_stmt_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _gather_42_rule(p)) + (a = _gather_42_rule(p)) // ','.with_item+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 11)) + (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3181,17 +3181,17 @@ with_stmt_rule(Parser *p) Token * literal; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (a = _gather_44_rule(p)) + (a = _gather_44_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3229,9 +3229,9 @@ with_item_rule(Parser *p) expr_ty e; void *o; if ( - (e = expression_rule(p)) + (e = expression_rule(p)) // expression && - (o = _tmp_46_rule(p), 1) + (o = _tmp_46_rule(p), 1) // ['as' target] ) { res = _Py_withitem ( e , o , p -> arena ); @@ -3273,13 +3273,13 @@ try_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) + (keyword = _PyPegen_expect_token(p, 511)) // token='try' && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (f = finally_block_rule(p)) + (f = finally_block_rule(p)) // finally_block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3307,17 +3307,17 @@ try_stmt_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) + (keyword = _PyPegen_expect_token(p, 511)) // token='try' && - 
(literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (ex = _loop1_47_rule(p)) + (ex = _loop1_47_rule(p)) // except_block+ && - (el = else_block_rule(p), 1) + (el = else_block_rule(p), 1) // else_block? && - (f = finally_block_rule(p), 1) + (f = finally_block_rule(p), 1) // finally_block? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3366,15 +3366,15 @@ except_block_rule(Parser *p) Token * literal; void *t; if ( - (keyword = _PyPegen_expect_token(p, 520)) + (keyword = _PyPegen_expect_token(p, 520)) // token='except' && - (e = expression_rule(p)) + (e = expression_rule(p)) // expression && - (t = _tmp_48_rule(p), 1) + (t = _tmp_48_rule(p), 1) // ['as' target] && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3399,11 +3399,11 @@ except_block_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 520)) + (keyword = _PyPegen_expect_token(p, 520)) // token='except' && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3442,11 +3442,11 @@ finally_block_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 521)) + (keyword = _PyPegen_expect_token(p, 521)) // token='finally' && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (a = block_rule(p)) + (a = block_rule(p)) // block ) { res = a; @@ -3484,9 +3484,9 @@ return_stmt_rule(Parser *p) void *a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 500)) + (keyword = _PyPegen_expect_token(p, 500)) // token='return' && - (a = 
star_expressions_rule(p), 1) + (a = star_expressions_rule(p), 1) // star_expressions? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3533,11 +3533,11 @@ raise_stmt_rule(Parser *p) void *b; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) + (keyword = _PyPegen_expect_token(p, 501)) // token='raise' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = _tmp_49_rule(p), 1) + (b = _tmp_49_rule(p), 1) // ['from' expression] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3560,7 +3560,7 @@ raise_stmt_rule(Parser *p) { // 'raise' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) + (keyword = _PyPegen_expect_token(p, 501)) // token='raise' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3598,9 +3598,9 @@ function_def_rule(Parser *p) asdl_seq* d; stmt_ty f; if ( - (d = decorators_rule(p)) + (d = decorators_rule(p)) // decorators && - (f = function_def_raw_rule(p)) + (f = function_def_raw_rule(p)) // function_def_raw ) { res = _PyPegen_function_def_decorators ( p , d , f ); @@ -3615,7 +3615,7 @@ function_def_rule(Parser *p) { // function_def_raw stmt_ty function_def_raw_var; if ( - (function_def_raw_var = function_def_raw_rule(p)) + (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw ) { res = function_def_raw_var; @@ -3658,23 +3658,23 @@ function_def_raw_rule(Parser *p) void *params; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 522)) + (keyword = _PyPegen_expect_token(p, 522)) // token='def' && - (n = _PyPegen_name_token(p)) + (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (params = params_rule(p), 1) + (params = params_rule(p), 1) // params? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_50_rule(p), 1) + (a = _tmp_50_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) + (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = func_type_comment_rule(p), 1) + (tc = func_type_comment_rule(p), 1) // func_type_comment? && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3706,25 +3706,25 @@ function_def_raw_rule(Parser *p) void *params; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 522)) + (keyword = _PyPegen_expect_token(p, 522)) // token='def' && - (n = _PyPegen_name_token(p)) + (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (params = params_rule(p), 1) + (params = params_rule(p), 1) // params? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_51_rule(p), 1) + (a = _tmp_51_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) + (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = func_type_comment_rule(p), 1) + (tc = func_type_comment_rule(p), 1) // func_type_comment? 
&& - (b = block_rule(p)) + (b = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -3765,9 +3765,9 @@ func_type_comment_rule(Parser *p) Token * newline_var; Token * t; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (t = _PyPegen_expect_token(p, TYPE_COMMENT)) + (t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && _PyPegen_lookahead(1, _tmp_52_rule, p) ) @@ -3784,7 +3784,7 @@ func_type_comment_rule(Parser *p) { // invalid_double_type_comments void *invalid_double_type_comments_var; if ( - (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) + (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments ) { res = invalid_double_type_comments_var; @@ -3795,7 +3795,7 @@ func_type_comment_rule(Parser *p) { // TYPE_COMMENT Token * type_comment_var; if ( - (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' ) { res = type_comment_var; @@ -3820,7 +3820,7 @@ params_rule(Parser *p) { // invalid_parameters void *invalid_parameters_var; if ( - (invalid_parameters_var = invalid_parameters_rule(p)) + (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters ) { res = invalid_parameters_var; @@ -3831,7 +3831,7 @@ params_rule(Parser *p) { // parameters arguments_ty parameters_var; if ( - (parameters_var = parameters_rule(p)) + (parameters_var = parameters_rule(p)) // parameters ) { res = parameters_var; @@ -3864,13 +3864,13 @@ parameters_rule(Parser *p) asdl_seq * c; void *d; if ( - (a = slash_no_default_rule(p)) + (a = slash_no_default_rule(p)) // slash_no_default && - (b = _loop0_53_rule(p)) + (b = _loop0_53_rule(p)) // param_no_default* && - (c = _loop0_54_rule(p)) + (c = _loop0_54_rule(p)) // param_with_default* && - (d = star_etc_rule(p), 1) + (d = 
star_etc_rule(p), 1) // star_etc? ) { res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); @@ -3887,11 +3887,11 @@ parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = slash_with_default_rule(p)) + (a = slash_with_default_rule(p)) // slash_with_default && - (b = _loop0_55_rule(p)) + (b = _loop0_55_rule(p)) // param_with_default* && - (c = star_etc_rule(p), 1) + (c = star_etc_rule(p), 1) // star_etc? ) { res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); @@ -3908,11 +3908,11 @@ parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = _loop1_56_rule(p)) + (a = _loop1_56_rule(p)) // param_no_default+ && - (b = _loop0_57_rule(p)) + (b = _loop0_57_rule(p)) // param_with_default* && - (c = star_etc_rule(p), 1) + (c = star_etc_rule(p), 1) // star_etc? ) { res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); @@ -3928,9 +3928,9 @@ parameters_rule(Parser *p) asdl_seq * a; void *b; if ( - (a = _loop1_58_rule(p)) + (a = _loop1_58_rule(p)) // param_with_default+ && - (b = star_etc_rule(p), 1) + (b = star_etc_rule(p), 1) // star_etc? 
) { res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); @@ -3945,7 +3945,7 @@ parameters_rule(Parser *p) { // star_etc StarEtc* a; if ( - (a = star_etc_rule(p)) + (a = star_etc_rule(p)) // star_etc ) { res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); @@ -3976,11 +3976,11 @@ slash_no_default_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _loop1_59_rule(p)) + (a = _loop1_59_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { res = a; @@ -3996,11 +3996,11 @@ slash_no_default_rule(Parser *p) asdl_seq * a; Token * literal; if ( - (a = _loop1_60_rule(p)) + (a = _loop1_60_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = a; @@ -4034,13 +4034,13 @@ slash_with_default_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _loop0_61_rule(p)) + (a = _loop0_61_rule(p)) // param_no_default* && - (b = _loop1_62_rule(p)) + (b = _loop1_62_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { res = _PyPegen_slash_with_default ( p , a , b ); @@ -4057,13 +4057,13 @@ slash_with_default_rule(Parser *p) asdl_seq * b; Token * literal; if ( - (a = _loop0_63_rule(p)) + (a = _loop0_63_rule(p)) // param_no_default* && - (b = _loop1_64_rule(p)) + (b = _loop1_64_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, 
_PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = _PyPegen_slash_with_default ( p , a , b ); @@ -4099,13 +4099,13 @@ star_etc_rule(Parser *p) void *c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = param_no_default_rule(p)) + (a = param_no_default_rule(p)) // param_no_default && - (b = _loop0_65_rule(p)) + (b = _loop0_65_rule(p)) // param_maybe_default* && - (c = kwds_rule(p), 1) + (c = kwds_rule(p), 1) // kwds? ) { res = _PyPegen_star_etc ( p , a , b , c ); @@ -4123,13 +4123,13 @@ star_etc_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_66_rule(p)) + (b = _loop1_66_rule(p)) // param_maybe_default+ && - (c = kwds_rule(p), 1) + (c = kwds_rule(p), 1) // kwds? 
) { res = _PyPegen_star_etc ( p , NULL , b , c ); @@ -4144,7 +4144,7 @@ star_etc_rule(Parser *p) { // kwds arg_ty a; if ( - (a = kwds_rule(p)) + (a = kwds_rule(p)) // kwds ) { res = _PyPegen_star_etc ( p , NULL , NULL , a ); @@ -4159,7 +4159,7 @@ star_etc_rule(Parser *p) { // invalid_star_etc void *invalid_star_etc_var; if ( - (invalid_star_etc_var = invalid_star_etc_rule(p)) + (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc ) { res = invalid_star_etc_var; @@ -4185,9 +4185,9 @@ kwds_rule(Parser *p) arg_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = param_no_default_rule(p)) + (a = param_no_default_rule(p)) // param_no_default ) { res = a; @@ -4218,11 +4218,11 @@ param_no_default_rule(Parser *p) Token * literal; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); @@ -4238,11 +4238,11 @@ param_no_default_rule(Parser *p) arg_ty a; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
&& - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); @@ -4274,13 +4274,13 @@ param_with_default_rule(Parser *p) Token * literal; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { res = _PyPegen_name_default_pair ( p , a , c , tc ); @@ -4297,13 +4297,13 @@ param_with_default_rule(Parser *p) expr_ty c; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = _PyPegen_name_default_pair ( p , a , c , tc ); @@ -4337,13 +4337,13 @@ param_maybe_default_rule(Parser *p) Token * literal; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { res = _PyPegen_name_default_pair ( p , a , c , tc ); @@ -4360,13 +4360,13 @@ param_maybe_default_rule(Parser *p) void *c; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? 
&& - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = _PyPegen_name_default_pair ( p , a , c , tc ); @@ -4404,9 +4404,9 @@ param_rule(Parser *p) expr_ty a; void *b; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (b = annotation_rule(p), 1) + (b = annotation_rule(p), 1) // annotation? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4444,9 +4444,9 @@ annotation_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = a; @@ -4476,9 +4476,9 @@ default_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = a; @@ -4507,7 +4507,7 @@ decorators_rule(Parser *p) { // (('@' named_expression NEWLINE))+ asdl_seq * a; if ( - (a = _loop1_67_rule(p)) + (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ ) { res = a; @@ -4537,9 +4537,9 @@ class_def_rule(Parser *p) asdl_seq* a; stmt_ty b; if ( - (a = decorators_rule(p)) + (a = decorators_rule(p)) // decorators && - (b = class_def_raw_rule(p)) + (b = class_def_raw_rule(p)) // class_def_raw ) { res = _PyPegen_class_def_decorators ( p , a , b ); @@ -4554,7 +4554,7 @@ class_def_rule(Parser *p) { // class_def_raw stmt_ty class_def_raw_var; if ( - (class_def_raw_var = class_def_raw_rule(p)) + (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw ) { res = class_def_raw_var; @@ -4591,15 +4591,15 @@ class_def_raw_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 523)) + (keyword 
= _PyPegen_expect_token(p, 523)) // token='class' && - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (b = _tmp_68_rule(p), 1) + (b = _tmp_68_rule(p), 1) // ['(' arguments? ')'] && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (c = block_rule(p)) + (c = block_rule(p)) // block ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4641,13 +4641,13 @@ block_rule(Parser *p) Token * indent_var; Token * newline_var; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (indent_var = _PyPegen_expect_token(p, INDENT)) + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' && - (a = statements_rule(p)) + (a = statements_rule(p)) // statements && - (dedent_var = _PyPegen_expect_token(p, DEDENT)) + (dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT' ) { res = a; @@ -4662,7 +4662,7 @@ block_rule(Parser *p) { // simple_stmt asdl_seq* simple_stmt_var; if ( - (simple_stmt_var = simple_stmt_rule(p)) + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { res = simple_stmt_var; @@ -4673,7 +4673,7 @@ block_rule(Parser *p) { // invalid_block void *invalid_block_var; if ( - (invalid_block_var = invalid_block_rule(p)) + (invalid_block_var = invalid_block_rule(p)) // invalid_block ) { res = invalid_block_var; @@ -4701,9 +4701,9 @@ expressions_list_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_69_rule(p)) + (a = _gather_69_rule(p)) // ','.star_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { res = a; @@ -4746,11 +4746,11 @@ star_expressions_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = star_expression_rule(p)) + (a = star_expression_rule(p)) // star_expression && - (b = _loop1_71_rule(p)) + (b = _loop1_71_rule(p)) // ((',' star_expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4774,9 +4774,9 @@ star_expressions_rule(Parser *p) expr_ty a; Token * literal; if ( - (a = star_expression_rule(p)) + (a = star_expression_rule(p)) // star_expression && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4799,7 +4799,7 @@ star_expressions_rule(Parser *p) { // star_expression expr_ty star_expression_var; if ( - (star_expression_var = star_expression_rule(p)) + (star_expression_var = star_expression_rule(p)) // star_expression ) { res = star_expression_var; @@ -4835,9 +4835,9 @@ star_expression_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4860,7 +4860,7 @@ star_expression_rule(Parser *p) { // expression expr_ty expression_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression ) { res = expression_var; @@ -4888,9 +4888,9 @@ star_named_expressions_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_72_rule(p)) + (a = _gather_72_rule(p)) // ','.star_named_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { res = a; @@ -4928,9 +4928,9 @@ star_named_expression_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -4953,7 +4953,7 @@ star_named_expression_rule(Parser *p) { // named_expression expr_ty named_expression_var; if ( - (named_expression_var = named_expression_rule(p)) + (named_expression_var = named_expression_rule(p)) // named_expression ) { res = named_expression_var; @@ -4988,11 +4988,11 @@ named_expression_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 53)) + (literal = _PyPegen_expect_token(p, 53)) // token=':=' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5015,9 +5015,9 @@ named_expression_rule(Parser *p) { // expression !':=' expr_ty expression_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { res = expression_var; @@ -5028,7 +5028,7 @@ named_expression_rule(Parser *p) { // invalid_named_expression void *invalid_named_expression_var; if ( - (invalid_named_expression_var = invalid_named_expression_rule(p)) + (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression ) { res = invalid_named_expression_var; @@ -5053,7 +5053,7 @@ annotated_rhs_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -5064,7 +5064,7 @@ annotated_rhs_rule(Parser *p) { // star_expressions 
expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -5100,11 +5100,11 @@ expressions_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = _loop1_74_rule(p)) + (b = _loop1_74_rule(p)) // ((',' expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5128,9 +5128,9 @@ expressions_rule(Parser *p) expr_ty a; Token * literal; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5153,7 +5153,7 @@ expressions_rule(Parser *p) { // expression expr_ty expression_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression ) { res = expression_var; @@ -5192,15 +5192,15 @@ expression_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (a = disjunction_rule(p)) + (a = disjunction_rule(p)) // disjunction && - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (b = disjunction_rule(p)) + (b = disjunction_rule(p)) // disjunction && - (keyword_1 = _PyPegen_expect_token(p, 516)) + (keyword_1 = _PyPegen_expect_token(p, 516)) // token='else' && - (c = expression_rule(p)) + (c = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5223,7 +5223,7 @@ expression_rule(Parser *p) { // disjunction expr_ty disjunction_var; if ( - (disjunction_var = disjunction_rule(p)) + (disjunction_var = disjunction_rule(p)) // disjunction ) { res = disjunction_var; @@ -5234,7 +5234,7 @@ 
expression_rule(Parser *p) { // lambdef expr_ty lambdef_var; if ( - (lambdef_var = lambdef_rule(p)) + (lambdef_var = lambdef_rule(p)) // lambdef ) { res = lambdef_var; @@ -5271,13 +5271,13 @@ lambdef_rule(Parser *p) Token * keyword; Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 524)) + (keyword = _PyPegen_expect_token(p, 524)) // token='lambda' && - (a = lambda_parameters_rule(p), 1) + (a = lambda_parameters_rule(p), 1) // lambda_parameters? && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5322,13 +5322,13 @@ lambda_parameters_rule(Parser *p) asdl_seq * c; void *d; if ( - (a = lambda_slash_no_default_rule(p)) + (a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default && - (b = _loop0_75_rule(p)) + (b = _loop0_75_rule(p)) // lambda_param_no_default* && - (c = _loop0_76_rule(p)) + (c = _loop0_76_rule(p)) // lambda_param_with_default* && - (d = lambda_star_etc_rule(p), 1) + (d = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); @@ -5345,11 +5345,11 @@ lambda_parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = lambda_slash_with_default_rule(p)) + (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default && - (b = _loop0_77_rule(p)) + (b = _loop0_77_rule(p)) // lambda_param_with_default* && - (c = lambda_star_etc_rule(p), 1) + (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
) { res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); @@ -5366,11 +5366,11 @@ lambda_parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = _loop1_78_rule(p)) + (a = _loop1_78_rule(p)) // lambda_param_no_default+ && - (b = _loop0_79_rule(p)) + (b = _loop0_79_rule(p)) // lambda_param_with_default* && - (c = lambda_star_etc_rule(p), 1) + (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); @@ -5386,9 +5386,9 @@ lambda_parameters_rule(Parser *p) asdl_seq * a; void *b; if ( - (a = _loop1_80_rule(p)) + (a = _loop1_80_rule(p)) // lambda_param_with_default+ && - (b = lambda_star_etc_rule(p), 1) + (b = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); @@ -5403,7 +5403,7 @@ lambda_parameters_rule(Parser *p) { // lambda_star_etc StarEtc* a; if ( - (a = lambda_star_etc_rule(p)) + (a = lambda_star_etc_rule(p)) // lambda_star_etc ) { res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); @@ -5436,11 +5436,11 @@ lambda_slash_no_default_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = _loop1_81_rule(p)) + (a = _loop1_81_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { res = a; @@ -5456,11 +5456,11 @@ lambda_slash_no_default_rule(Parser *p) asdl_seq * a; Token * literal; if ( - (a = _loop1_82_rule(p)) + (a = _loop1_82_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = a; @@ -5494,13 +5494,13 @@ lambda_slash_with_default_rule(Parser *p) Token * literal; Token * 
literal_1; if ( - (a = _loop0_83_rule(p)) + (a = _loop0_83_rule(p)) // lambda_param_no_default* && - (b = _loop1_84_rule(p)) + (b = _loop1_84_rule(p)) // lambda_param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { res = _PyPegen_slash_with_default ( p , a , b ); @@ -5517,13 +5517,13 @@ lambda_slash_with_default_rule(Parser *p) asdl_seq * b; Token * literal; if ( - (a = _loop0_85_rule(p)) + (a = _loop0_85_rule(p)) // lambda_param_no_default* && - (b = _loop1_86_rule(p)) + (b = _loop1_86_rule(p)) // lambda_param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = _PyPegen_slash_with_default ( p , a , b ); @@ -5559,13 +5559,13 @@ lambda_star_etc_rule(Parser *p) void *c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = lambda_param_no_default_rule(p)) + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default && - (b = _loop0_87_rule(p)) + (b = _loop0_87_rule(p)) // lambda_param_maybe_default* && - (c = lambda_kwds_rule(p), 1) + (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) { res = _PyPegen_star_etc ( p , a , b , c ); @@ -5583,13 +5583,13 @@ lambda_star_etc_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_88_rule(p)) + (b = _loop1_88_rule(p)) // lambda_param_maybe_default+ && - (c = lambda_kwds_rule(p), 1) + (c = lambda_kwds_rule(p), 1) // lambda_kwds? 
) { res = _PyPegen_star_etc ( p , NULL , b , c ); @@ -5604,7 +5604,7 @@ lambda_star_etc_rule(Parser *p) { // lambda_kwds arg_ty a; if ( - (a = lambda_kwds_rule(p)) + (a = lambda_kwds_rule(p)) // lambda_kwds ) { res = _PyPegen_star_etc ( p , NULL , NULL , a ); @@ -5619,7 +5619,7 @@ lambda_star_etc_rule(Parser *p) { // invalid_lambda_star_etc void *invalid_lambda_star_etc_var; if ( - (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) + (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc ) { res = invalid_lambda_star_etc_var; @@ -5645,9 +5645,9 @@ lambda_kwds_rule(Parser *p) arg_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = lambda_param_no_default_rule(p)) + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = a; @@ -5677,9 +5677,9 @@ lambda_param_no_default_rule(Parser *p) arg_ty a; Token * literal; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { res = a; @@ -5694,9 +5694,9 @@ lambda_param_no_default_rule(Parser *p) { // lambda_param &':' arg_ty a; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = a; @@ -5727,11 +5727,11 @@ lambda_param_with_default_rule(Parser *p) expr_ty c; Token * literal; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { res = _PyPegen_name_default_pair ( p , a , c , NULL ); @@ -5747,11 +5747,11 @@ lambda_param_with_default_rule(Parser *p) arg_ty a; expr_ty c; if ( - (a = 
lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = _PyPegen_name_default_pair ( p , a , c , NULL ); @@ -5782,11 +5782,11 @@ lambda_param_maybe_default_rule(Parser *p) void *c; Token * literal; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' ) { res = _PyPegen_name_default_pair ( p , a , c , NULL ); @@ -5802,11 +5802,11 @@ lambda_param_maybe_default_rule(Parser *p) arg_ty a; void *c; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { res = _PyPegen_name_default_pair ( p , a , c , NULL ); @@ -5843,7 +5843,7 @@ lambda_param_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5891,9 +5891,9 @@ disjunction_rule(Parser *p) expr_ty a; asdl_seq * b; if ( - (a = conjunction_rule(p)) + (a = conjunction_rule(p)) // conjunction && - (b = _loop1_89_rule(p)) + (b = _loop1_89_rule(p)) // (('or' conjunction))+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5916,7 +5916,7 @@ disjunction_rule(Parser *p) { // conjunction expr_ty conjunction_var; if ( - (conjunction_var = conjunction_rule(p)) + (conjunction_var = conjunction_rule(p)) // conjunction ) { res = conjunction_var; @@ -5953,9 +5953,9 @@ conjunction_rule(Parser *p) expr_ty a; asdl_seq * b; if ( - (a = inversion_rule(p)) + 
(a = inversion_rule(p)) // inversion && - (b = _loop1_90_rule(p)) + (b = _loop1_90_rule(p)) // (('and' inversion))+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -5978,7 +5978,7 @@ conjunction_rule(Parser *p) { // inversion expr_ty inversion_var; if ( - (inversion_var = inversion_rule(p)) + (inversion_var = inversion_rule(p)) // inversion ) { res = inversion_var; @@ -6015,9 +6015,9 @@ inversion_rule(Parser *p) expr_ty a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 525)) + (keyword = _PyPegen_expect_token(p, 525)) // token='not' && - (a = inversion_rule(p)) + (a = inversion_rule(p)) // inversion ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6040,7 +6040,7 @@ inversion_rule(Parser *p) { // comparison expr_ty comparison_var; if ( - (comparison_var = comparison_rule(p)) + (comparison_var = comparison_rule(p)) // comparison ) { res = comparison_var; @@ -6075,9 +6075,9 @@ comparison_rule(Parser *p) expr_ty a; asdl_seq * b; if ( - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or && - (b = _loop1_91_rule(p)) + (b = _loop1_91_rule(p)) // compare_op_bitwise_or_pair+ ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6100,7 +6100,7 @@ comparison_rule(Parser *p) { // bitwise_or expr_ty bitwise_or_var; if ( - (bitwise_or_var = bitwise_or_rule(p)) + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or ) { res = bitwise_or_var; @@ -6135,7 +6135,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // eq_bitwise_or CmpopExprPair* eq_bitwise_or_var; if ( - (eq_bitwise_or_var = eq_bitwise_or_rule(p)) + (eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or ) { res = eq_bitwise_or_var; @@ -6146,7 +6146,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // noteq_bitwise_or CmpopExprPair* noteq_bitwise_or_var; if ( - (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) + (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or ) { res = noteq_bitwise_or_var; @@ -6157,7 +6157,7 @@ 
compare_op_bitwise_or_pair_rule(Parser *p) { // lte_bitwise_or CmpopExprPair* lte_bitwise_or_var; if ( - (lte_bitwise_or_var = lte_bitwise_or_rule(p)) + (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or ) { res = lte_bitwise_or_var; @@ -6168,7 +6168,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // lt_bitwise_or CmpopExprPair* lt_bitwise_or_var; if ( - (lt_bitwise_or_var = lt_bitwise_or_rule(p)) + (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or ) { res = lt_bitwise_or_var; @@ -6179,7 +6179,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // gte_bitwise_or CmpopExprPair* gte_bitwise_or_var; if ( - (gte_bitwise_or_var = gte_bitwise_or_rule(p)) + (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or ) { res = gte_bitwise_or_var; @@ -6190,7 +6190,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // gt_bitwise_or CmpopExprPair* gt_bitwise_or_var; if ( - (gt_bitwise_or_var = gt_bitwise_or_rule(p)) + (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or ) { res = gt_bitwise_or_var; @@ -6201,7 +6201,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // notin_bitwise_or CmpopExprPair* notin_bitwise_or_var; if ( - (notin_bitwise_or_var = notin_bitwise_or_rule(p)) + (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or ) { res = notin_bitwise_or_var; @@ -6212,7 +6212,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // in_bitwise_or CmpopExprPair* in_bitwise_or_var; if ( - (in_bitwise_or_var = in_bitwise_or_rule(p)) + (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or ) { res = in_bitwise_or_var; @@ -6223,7 +6223,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // isnot_bitwise_or CmpopExprPair* isnot_bitwise_or_var; if ( - (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) + (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or ) { res = isnot_bitwise_or_var; @@ -6234,7 +6234,7 @@ compare_op_bitwise_or_pair_rule(Parser *p) { // is_bitwise_or CmpopExprPair* is_bitwise_or_var; if ( - 
(is_bitwise_or_var = is_bitwise_or_rule(p)) + (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or ) { res = is_bitwise_or_var; @@ -6260,9 +6260,9 @@ eq_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 27)) + (literal = _PyPegen_expect_token(p, 27)) // token='==' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); @@ -6292,9 +6292,9 @@ noteq_bitwise_or_rule(Parser *p) void *_tmp_92_var; expr_ty a; if ( - (_tmp_92_var = _tmp_92_rule(p)) + (_tmp_92_var = _tmp_92_rule(p)) // '!=' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); @@ -6324,9 +6324,9 @@ lte_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 29)) + (literal = _PyPegen_expect_token(p, 29)) // token='<=' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); @@ -6356,9 +6356,9 @@ lt_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 20)) + (literal = _PyPegen_expect_token(p, 20)) // token='<' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); @@ -6388,9 +6388,9 @@ gte_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 30)) + (literal = _PyPegen_expect_token(p, 30)) // token='>=' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); @@ -6420,9 +6420,9 @@ gt_bitwise_or_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 21)) + (literal = _PyPegen_expect_token(p, 21)) // token='>' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); @@ -6453,11 +6453,11 @@ 
notin_bitwise_or_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 525)) + (keyword = _PyPegen_expect_token(p, 525)) // token='not' && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); @@ -6487,9 +6487,9 @@ in_bitwise_or_rule(Parser *p) expr_ty a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 518)) + (keyword = _PyPegen_expect_token(p, 518)) // token='in' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , In , a ); @@ -6520,11 +6520,11 @@ isnot_bitwise_or_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 526)) + (keyword = _PyPegen_expect_token(p, 526)) // token='is' && - (keyword_1 = _PyPegen_expect_token(p, 525)) + (keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); @@ -6554,9 +6554,9 @@ is_bitwise_or_rule(Parser *p) expr_ty a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 526)) + (keyword = _PyPegen_expect_token(p, 526)) // token='is' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_cmpop_expr_pair ( p , Is , a ); @@ -6620,11 +6620,11 @@ bitwise_or_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or && - (literal = _PyPegen_expect_token(p, 18)) + (literal = _PyPegen_expect_token(p, 18)) // token='|' && - (b = bitwise_xor_rule(p)) + (b = bitwise_xor_rule(p)) // bitwise_xor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6647,7 +6647,7 @@ bitwise_or_raw(Parser *p) { // bitwise_xor expr_ty bitwise_xor_var; if ( - (bitwise_xor_var = bitwise_xor_rule(p)) + 
(bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor ) { res = bitwise_xor_var; @@ -6707,11 +6707,11 @@ bitwise_xor_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = bitwise_xor_rule(p)) + (a = bitwise_xor_rule(p)) // bitwise_xor && - (literal = _PyPegen_expect_token(p, 32)) + (literal = _PyPegen_expect_token(p, 32)) // token='^' && - (b = bitwise_and_rule(p)) + (b = bitwise_and_rule(p)) // bitwise_and ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6734,7 +6734,7 @@ bitwise_xor_raw(Parser *p) { // bitwise_and expr_ty bitwise_and_var; if ( - (bitwise_and_var = bitwise_and_rule(p)) + (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and ) { res = bitwise_and_var; @@ -6794,11 +6794,11 @@ bitwise_and_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = bitwise_and_rule(p)) + (a = bitwise_and_rule(p)) // bitwise_and && - (literal = _PyPegen_expect_token(p, 19)) + (literal = _PyPegen_expect_token(p, 19)) // token='&' && - (b = shift_expr_rule(p)) + (b = shift_expr_rule(p)) // shift_expr ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6821,7 +6821,7 @@ bitwise_and_raw(Parser *p) { // shift_expr expr_ty shift_expr_var; if ( - (shift_expr_var = shift_expr_rule(p)) + (shift_expr_var = shift_expr_rule(p)) // shift_expr ) { res = shift_expr_var; @@ -6881,11 +6881,11 @@ shift_expr_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = shift_expr_rule(p)) + (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 33)) + (literal = _PyPegen_expect_token(p, 33)) // token='<<' && - (b = sum_rule(p)) + (b = sum_rule(p)) // sum ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -6910,11 +6910,11 @@ shift_expr_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = shift_expr_rule(p)) + (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 34)) + (literal = _PyPegen_expect_token(p, 34)) // token='>>' && - (b = sum_rule(p)) + (b = sum_rule(p)) // sum ) { Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); @@ -6937,7 +6937,7 @@ shift_expr_raw(Parser *p) { // sum expr_ty sum_var; if ( - (sum_var = sum_rule(p)) + (sum_var = sum_rule(p)) // sum ) { res = sum_var; @@ -6997,11 +6997,11 @@ sum_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = sum_rule(p)) + (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 14)) + (literal = _PyPegen_expect_token(p, 14)) // token='+' && - (b = term_rule(p)) + (b = term_rule(p)) // term ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7026,11 +7026,11 @@ sum_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = sum_rule(p)) + (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 15)) + (literal = _PyPegen_expect_token(p, 15)) // token='-' && - (b = term_rule(p)) + (b = term_rule(p)) // term ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7053,7 +7053,7 @@ sum_raw(Parser *p) { // term expr_ty term_var; if ( - (term_var = term_rule(p)) + (term_var = term_rule(p)) // term ) { res = term_var; @@ -7119,11 +7119,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7148,11 +7148,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 17)) + (literal = _PyPegen_expect_token(p, 17)) // token='/' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7177,11 +7177,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 47)) + (literal = _PyPegen_expect_token(p, 47)) // token='//' && - (b = factor_rule(p)) + (b 
= factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7206,11 +7206,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 24)) + (literal = _PyPegen_expect_token(p, 24)) // token='%' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7235,11 +7235,11 @@ term_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 49)) + (literal = _PyPegen_expect_token(p, 49)) // token='@' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7262,7 +7262,7 @@ term_raw(Parser *p) { // factor expr_ty factor_var; if ( - (factor_var = factor_rule(p)) + (factor_var = factor_rule(p)) // factor ) { res = factor_var; @@ -7298,9 +7298,9 @@ factor_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 14)) + (literal = _PyPegen_expect_token(p, 14)) // token='+' && - (a = factor_rule(p)) + (a = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7324,9 +7324,9 @@ factor_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 15)) + (literal = _PyPegen_expect_token(p, 15)) // token='-' && - (a = factor_rule(p)) + (a = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7350,9 +7350,9 @@ factor_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 31)) + (literal = _PyPegen_expect_token(p, 31)) // token='~' && - (a = factor_rule(p)) + (a = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7375,7 +7375,7 @@ factor_rule(Parser *p) { // power expr_ty power_var; if ( - (power_var = power_rule(p)) + (power_var = power_rule(p)) 
// power ) { res = power_var; @@ -7411,11 +7411,11 @@ power_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = await_primary_rule(p)) + (a = await_primary_rule(p)) // await_primary && - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7438,7 +7438,7 @@ power_rule(Parser *p) { // await_primary expr_ty await_primary_var; if ( - (await_primary_var = await_primary_rule(p)) + (await_primary_var = await_primary_rule(p)) // await_primary ) { res = await_primary_var; @@ -7474,9 +7474,9 @@ await_primary_rule(Parser *p) expr_ty a; Token * await_var; if ( - (await_var = _PyPegen_expect_token(p, AWAIT)) + (await_var = _PyPegen_expect_token(p, AWAIT)) // token='AWAIT' && - (a = primary_rule(p)) + (a = primary_rule(p)) // primary ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7499,7 +7499,7 @@ await_primary_rule(Parser *p) { // primary expr_ty primary_var; if ( - (primary_var = primary_rule(p)) + (primary_var = primary_rule(p)) // primary ) { res = primary_var; @@ -7565,11 +7565,11 @@ primary_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7593,9 +7593,9 @@ primary_raw(Parser *p) expr_ty a; expr_ty b; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (b = genexp_rule(p)) + (b = genexp_rule(p)) // genexp ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7621,13 +7621,13 @@ primary_raw(Parser *p) Token * literal; Token * literal_1; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = arguments_rule(p), 1) + (b = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7653,13 +7653,13 @@ primary_raw(Parser *p) Token * literal; Token * literal_1; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7682,7 +7682,7 @@ primary_raw(Parser *p) { // atom expr_ty atom_var; if ( - (atom_var = atom_rule(p)) + (atom_var = atom_rule(p)) // atom ) { res = atom_var; @@ -7715,9 +7715,9 @@ slices_rule(Parser *p) { // slice !',' expr_ty a; if ( - (a = slice_rule(p)) + (a = slice_rule(p)) // slice && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { res = a; @@ -7734,9 +7734,9 @@ slices_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_93_rule(p)) + (a = _gather_93_rule(p)) // ','.slice+ && - (opt_var = 
_PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7784,13 +7784,13 @@ slice_rule(Parser *p) void *c; Token * literal; if ( - (a = expression_rule(p), 1) + (a = expression_rule(p), 1) // expression? && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p), 1) + (b = expression_rule(p), 1) // expression? && - (c = _tmp_95_rule(p), 1) + (c = _tmp_95_rule(p), 1) // [':' expression?] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7813,7 +7813,7 @@ slice_rule(Parser *p) { // expression expr_ty a; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { res = a; @@ -7861,7 +7861,7 @@ atom_rule(Parser *p) { // NAME expr_ty name_var; if ( - (name_var = _PyPegen_name_token(p)) + (name_var = _PyPegen_name_token(p)) // NAME ) { res = name_var; @@ -7872,7 +7872,7 @@ atom_rule(Parser *p) { // 'True' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 527)) + (keyword = _PyPegen_expect_token(p, 527)) // token='True' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7895,7 +7895,7 @@ atom_rule(Parser *p) { // 'False' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 528)) + (keyword = _PyPegen_expect_token(p, 528)) // token='False' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7918,7 +7918,7 @@ atom_rule(Parser *p) { // 'None' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 529)) + (keyword = _PyPegen_expect_token(p, 529)) // token='None' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -7941,7 +7941,7 @@ atom_rule(Parser *p) { // '__new_parser__' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 530)) + (keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' ) { res = RAISE_SYNTAX_ERROR ( "You found it!" 
); @@ -7958,7 +7958,7 @@ atom_rule(Parser *p) if ( _PyPegen_lookahead(1, _PyPegen_string_token, p) && - (strings_var = strings_rule(p)) + (strings_var = strings_rule(p)) // strings ) { res = strings_var; @@ -7969,7 +7969,7 @@ atom_rule(Parser *p) { // NUMBER expr_ty number_var; if ( - (number_var = _PyPegen_number_token(p)) + (number_var = _PyPegen_number_token(p)) // NUMBER ) { res = number_var; @@ -7980,9 +7980,9 @@ atom_rule(Parser *p) { // &'(' (tuple | group | genexp) void *_tmp_96_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='(' && - (_tmp_96_var = _tmp_96_rule(p)) + (_tmp_96_var = _tmp_96_rule(p)) // tuple | group | genexp ) { res = _tmp_96_var; @@ -7993,9 +7993,9 @@ atom_rule(Parser *p) { // &'[' (list | listcomp) void *_tmp_97_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='[' && - (_tmp_97_var = _tmp_97_rule(p)) + (_tmp_97_var = _tmp_97_rule(p)) // list | listcomp ) { res = _tmp_97_var; @@ -8006,9 +8006,9 @@ atom_rule(Parser *p) { // &'{' (dict | set | dictcomp | setcomp) void *_tmp_98_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{' && - (_tmp_98_var = _tmp_98_rule(p)) + (_tmp_98_var = _tmp_98_rule(p)) // dict | set | dictcomp | setcomp ) { res = _tmp_98_var; @@ -8019,7 +8019,7 @@ atom_rule(Parser *p) { // '...' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 52)) + (literal = _PyPegen_expect_token(p, 52)) // token='...' 
) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8058,7 +8058,7 @@ strings_rule(Parser *p) { // STRING+ asdl_seq * a; if ( - (a = _loop1_99_rule(p)) + (a = _loop1_99_rule(p)) // STRING+ ) { res = _PyPegen_concatenate_strings ( p , a ); @@ -8098,11 +8098,11 @@ list_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = star_named_expressions_rule(p), 1) + (a = star_named_expressions_rule(p), 1) // star_named_expressions? && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8150,13 +8150,13 @@ listcomp_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8179,7 +8179,7 @@ listcomp_rule(Parser *p) { // invalid_comprehension void *invalid_comprehension_var; if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { res = invalid_comprehension_var; @@ -8214,11 +8214,11 @@ tuple_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_100_rule(p), 1) + (a = _tmp_100_rule(p), 1) // [star_named_expression ',' star_named_expressions?] 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8257,11 +8257,11 @@ group_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_101_rule(p)) + (a = _tmp_101_rule(p)) // yield_expr | named_expression && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = a; @@ -8301,13 +8301,13 @@ genexp_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8330,7 +8330,7 @@ genexp_rule(Parser *p) { // invalid_comprehension void *invalid_comprehension_var; if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { res = invalid_comprehension_var; @@ -8365,11 +8365,11 @@ set_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = expressions_list_rule(p)) + (a = expressions_list_rule(p)) // expressions_list && - (literal_1 = _PyPegen_expect_token(p, 26)) + (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8417,13 +8417,13 @@ setcomp_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = 
expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) + (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8446,7 +8446,7 @@ setcomp_rule(Parser *p) { // invalid_comprehension void *invalid_comprehension_var; if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { res = invalid_comprehension_var; @@ -8481,11 +8481,11 @@ dict_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = kvpairs_rule(p), 1) + (a = kvpairs_rule(p), 1) // kvpairs? && - (literal_1 = _PyPegen_expect_token(p, 26)) + (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8533,13 +8533,13 @@ dictcomp_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = kvpair_rule(p)) + (a = kvpair_rule(p)) // kvpair && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) + (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8578,9 +8578,9 @@ kvpairs_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_102_rule(p)) + (a = _gather_102_rule(p)) // ','.kvpair+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { res = a; @@ -8610,9 +8610,9 @@ kvpair_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { res = _PyPegen_key_value_pair ( p , NULL , a ); @@ -8629,11 +8629,11 @@ kvpair_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { res = _PyPegen_key_value_pair ( p , a , b ); @@ -8662,7 +8662,7 @@ for_if_clauses_rule(Parser *p) { // for_if_clause+ asdl_seq * _loop1_104_var; if ( - (_loop1_104_var = _loop1_104_rule(p)) + (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ ) { res = _loop1_104_var; @@ -8694,17 +8694,17 @@ for_if_clause_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (a = star_targets_rule(p)) + (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (b = disjunction_rule(p)) + (b = disjunction_rule(p)) // disjunction && - (c = _loop0_105_rule(p)) + (c = _loop0_105_rule(p)) // (('if' disjunction))* ) { res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); @@ -8723,15 +8723,15 @@ for_if_clause_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (a = star_targets_rule(p)) + (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 
518)) + (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (b = disjunction_rule(p)) + (b = disjunction_rule(p)) // disjunction && - (c = _loop0_106_rule(p)) + (c = _loop0_106_rule(p)) // (('if' disjunction))* ) { res = _Py_comprehension ( a , b , c , 0 , p -> arena ); @@ -8770,11 +8770,11 @@ yield_expr_rule(Parser *p) Token * keyword; Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 504)) + (keyword = _PyPegen_expect_token(p, 504)) // token='yield' && - (keyword_1 = _PyPegen_expect_token(p, 514)) + (keyword_1 = _PyPegen_expect_token(p, 514)) // token='from' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8798,9 +8798,9 @@ yield_expr_rule(Parser *p) void *a; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 504)) + (keyword = _PyPegen_expect_token(p, 504)) // token='yield' && - (a = star_expressions_rule(p), 1) + (a = star_expressions_rule(p), 1) // star_expressions? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8841,11 +8841,11 @@ arguments_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = args_rule(p)) + (a = args_rule(p)) // args && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { res = a; @@ -8860,7 +8860,7 @@ arguments_rule(Parser *p) { // incorrect_arguments void *incorrect_arguments_var; if ( - (incorrect_arguments_var = incorrect_arguments_rule(p)) + (incorrect_arguments_var = incorrect_arguments_rule(p)) // incorrect_arguments ) { res = incorrect_arguments_var; @@ -8895,9 +8895,9 @@ args_rule(Parser *p) expr_ty a; void *b; if ( - (a = starred_expression_rule(p)) + (a = starred_expression_rule(p)) // starred_expression && - (b = _tmp_107_rule(p), 1) + (b = _tmp_107_rule(p), 1) // [',' args] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8920,7 +8920,7 @@ args_rule(Parser *p) { // kwargs asdl_seq* a; if ( - (a = kwargs_rule(p)) + (a = kwargs_rule(p)) // kwargs ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8944,9 +8944,9 @@ args_rule(Parser *p) expr_ty a; void *b; if ( - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (b = _tmp_108_rule(p), 1) + (b = _tmp_108_rule(p), 1) // [',' args] ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -8988,11 +8988,11 @@ kwargs_rule(Parser *p) asdl_seq * b; Token * literal; if ( - (a = _gather_109_rule(p)) + (a = _gather_109_rule(p)) // ','.kwarg_or_starred+ && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _gather_111_rule(p)) + (b = _gather_111_rule(p)) // ','.kwarg_or_double_starred+ ) { res = _PyPegen_join_sequences ( p , a , b ); @@ -9007,7 +9007,7 @@ kwargs_rule(Parser *p) { // ','.kwarg_or_starred+ asdl_seq * _gather_113_var; if ( - (_gather_113_var = _gather_113_rule(p)) + (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ ) { res = _gather_113_var; @@ -9018,7 +9018,7 @@ kwargs_rule(Parser *p) { // ','.kwarg_or_double_starred+ asdl_seq * _gather_115_var; if ( - (_gather_115_var = 
_gather_115_rule(p)) + (_gather_115_var = _gather_115_rule(p)) // ','.kwarg_or_double_starred+ ) { res = _gather_115_var; @@ -9052,9 +9052,9 @@ starred_expression_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9101,11 +9101,11 @@ kwarg_or_starred_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9128,7 +9128,7 @@ kwarg_or_starred_rule(Parser *p) { // starred_expression expr_ty a; if ( - (a = starred_expression_rule(p)) + (a = starred_expression_rule(p)) // starred_expression ) { res = _PyPegen_keyword_or_starred ( p , a , 0 ); @@ -9167,11 +9167,11 @@ kwarg_or_double_starred_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9195,9 +9195,9 @@ kwarg_or_double_starred_rule(Parser *p) expr_ty a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9242,9 +9242,9 @@ star_targets_rule(Parser *p) { // star_target !',' expr_ty a; if ( - (a = star_target_rule(p)) + (a = star_target_rule(p)) // star_target && - 
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { res = a; @@ -9262,11 +9262,11 @@ star_targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = star_target_rule(p)) + (a = star_target_rule(p)) // star_target && - (b = _loop0_117_rule(p)) + (b = _loop0_117_rule(p)) // ((',' star_target))* && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9305,9 +9305,9 @@ star_targets_seq_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_118_rule(p)) + (a = _gather_118_rule(p)) // ','.star_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { res = a; @@ -9351,9 +9351,9 @@ star_target_rule(Parser *p) void *a; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = _tmp_120_rule(p)) + (a = _tmp_120_rule(p)) // !'*' star_target ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9378,11 +9378,11 @@ star_target_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9410,13 +9410,13 @@ star_target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9441,7 +9441,7 @@ star_target_rule(Parser *p) { // star_atom expr_ty star_atom_var; if ( - (star_atom_var = star_atom_rule(p)) + (star_atom_var = star_atom_rule(p)) // star_atom ) { res = star_atom_var; @@ -9479,7 +9479,7 @@ star_atom_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -9496,11 +9496,11 @@ star_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = star_target_rule(p)) + (a = star_target_rule(p)) // star_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -9517,11 +9517,11 @@ star_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = star_targets_seq_rule(p), 1) + (a = star_targets_seq_rule(p), 1) // star_targets_seq? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9546,11 +9546,11 @@ star_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = star_targets_seq_rule(p), 1) + (a = star_targets_seq_rule(p), 1) // star_targets_seq? && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9590,7 +9590,7 @@ inside_paren_ann_assign_target_rule(Parser *p) { // ann_assign_subscript_attribute_target expr_ty ann_assign_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) + (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target ) { res = ann_assign_subscript_attribute_target_var; @@ -9601,7 +9601,7 @@ inside_paren_ann_assign_target_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -9618,11 +9618,11 @@ inside_paren_ann_assign_target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = inside_paren_ann_assign_target_rule(p)) + (a = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = a; @@ -9663,11 +9663,11 @@ ann_assign_subscript_attribute_target_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = 
_PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9695,13 +9695,13 @@ ann_assign_subscript_attribute_target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9742,9 +9742,9 @@ del_targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_121_rule(p)) + (a = _gather_121_rule(p)) // ','.del_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { res = a; @@ -9788,11 +9788,11 @@ del_target_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9820,13 +9820,13 @@ del_target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -9851,7 +9851,7 @@ del_target_rule(Parser *p) { // del_t_atom expr_ty del_t_atom_var; if ( - (del_t_atom_var = del_t_atom_rule(p)) + (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom ) { res = del_t_atom_var; @@ -9885,7 +9885,7 @@ del_t_atom_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_set_expr_context ( p , a , Del ); @@ -9902,11 +9902,11 @@ del_t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = del_target_rule(p)) + (a = del_target_rule(p)) // del_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = _PyPegen_set_expr_context ( p , a , Del ); @@ -9923,11 +9923,11 @@ del_t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = del_targets_rule(p), 1) + (a = del_targets_rule(p), 1) // del_targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9952,11 +9952,11 @@ del_t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = del_targets_rule(p), 1) + (a = del_targets_rule(p), 1) // del_targets? && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9995,9 +9995,9 @@ targets_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_123_rule(p)) + (a = _gather_123_rule(p)) // ','.target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { res = a; @@ -10041,11 +10041,11 @@ target_rule(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -10073,13 +10073,13 @@ target_rule(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) @@ -10104,7 +10104,7 @@ target_rule(Parser *p) { // t_atom expr_ty t_atom_var; if ( - (t_atom_var = t_atom_rule(p)) + (t_atom_var = t_atom_rule(p)) // t_atom ) { res = t_atom_var; @@ -10170,11 +10170,11 @@ t_primary_raw(Parser *p) expr_ty b; Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10202,13 +10202,13 @@ t_primary_raw(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10234,9 +10234,9 @@ t_primary_raw(Parser *p) expr_ty a; expr_ty b; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (b = genexp_rule(p)) + (b = genexp_rule(p)) // genexp && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10264,13 +10264,13 @@ t_primary_raw(Parser *p) Token * literal; Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = arguments_rule(p), 1) + (b = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10295,7 +10295,7 @@ t_primary_raw(Parser *p) { // atom &t_lookahead expr_ty a; if ( - (a = atom_rule(p)) + (a = atom_rule(p)) // atom && _PyPegen_lookahead(1, t_lookahead_rule, p) ) @@ -10326,7 +10326,7 @@ t_lookahead_rule(Parser *p) { // '(' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' ) { res = literal; @@ -10337,7 +10337,7 @@ t_lookahead_rule(Parser *p) { // '[' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' ) { res = literal; @@ -10348,7 +10348,7 @@ t_lookahead_rule(Parser *p) { // '.' 
Token * literal; if ( - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' ) { res = literal; @@ -10381,7 +10381,7 @@ t_atom_rule(Parser *p) { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -10398,11 +10398,11 @@ t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = target_rule(p)) + (a = target_rule(p)) // target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = _PyPegen_set_expr_context ( p , a , Store ); @@ -10419,11 +10419,11 @@ t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = targets_rule(p), 1) + (b = targets_rule(p), 1) // targets? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -10448,11 +10448,11 @@ t_atom_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = targets_rule(p), 1) + (b = targets_rule(p), 1) // targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) + (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { Token *token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -10494,11 +10494,11 @@ incorrect_arguments_rule(Parser *p) Token * literal; Token * literal_1; if ( - (args_var = args_rule(p)) + (args_var = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) + (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' ) { res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); @@ -10517,13 +10517,13 @@ incorrect_arguments_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (for_if_clauses_var = for_if_clauses_rule(p)) + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (opt_var = _tmp_125_rule(p), 1) + (opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] ) { res = RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); @@ -10540,11 +10540,11 @@ incorrect_arguments_rule(Parser *p) expr_ty args_var; Token * literal; if ( - (a = args_rule(p)) + (a = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (args_var = args_rule(p)) + (args_var = args_rule(p)) // args ) { res = _PyPegen_arguments_parsing_error ( p , a ); @@ -10575,11 +10575,11 @@ invalid_named_expression_rule(Parser *p) expr_ty expression_var; Token * literal; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 53)) + (literal = _PyPegen_expect_token(p, 53)) // token=':=' && - (expression_var = expression_rule(p)) + (expression_var = 
expression_rule(p)) // expression ) { res = RAISE_SYNTAX_ERROR ( "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); @@ -10613,9 +10613,9 @@ invalid_assignment_rule(Parser *p) expr_ty list_var; Token * literal; if ( - (list_var = list_rule(p)) + (list_var = list_rule(p)) // list && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' ) { res = RAISE_SYNTAX_ERROR ( "only single target (not list) can be annotated" ); @@ -10631,9 +10631,9 @@ invalid_assignment_rule(Parser *p) Token * literal; expr_ty tuple_var; if ( - (tuple_var = tuple_rule(p)) + (tuple_var = tuple_rule(p)) // tuple && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' ) { res = RAISE_SYNTAX_ERROR ( "only single target (not tuple) can be annotated" ); @@ -10652,13 +10652,13 @@ invalid_assignment_rule(Parser *p) void *opt_var; UNUSED(opt_var); // Silence compiler warnings if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (expression_var_1 = expression_rule(p)) + (expression_var_1 = expression_rule(p)) // expression && - (opt_var = _tmp_126_rule(p), 1) + (opt_var = _tmp_126_rule(p), 1) // ['=' annotated_rhs] ) { res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); @@ -10675,11 +10675,11 @@ invalid_assignment_rule(Parser *p) void *_tmp_128_var; expr_ty a; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (_tmp_127_var = _tmp_127_rule(p)) + (_tmp_127_var = _tmp_127_rule(p)) // '=' | augassign && - (_tmp_128_var = _tmp_128_rule(p)) + (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions ) { res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); @@ -10708,9 +10708,9 @@ invalid_block_rule(Parser *p) { // NEWLINE !INDENT Token * newline_var; if ( 
- (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); @@ -10742,13 +10742,13 @@ invalid_comprehension_rule(Parser *p) asdl_seq* for_if_clauses_var; Token * literal; if ( - (_tmp_129_var = _tmp_129_rule(p)) + (_tmp_129_var = _tmp_129_rule(p)) // '[' | '(' | '{' && - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (for_if_clauses_var = for_if_clauses_rule(p)) + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { res = RAISE_SYNTAX_ERROR ( "iterable unpacking cannot be used in comprehension" ); @@ -10780,11 +10780,11 @@ invalid_parameters_rule(Parser *p) void *_tmp_131_var; arg_ty param_no_default_var; if ( - (_loop0_130_var = _loop0_130_rule(p)) + (_loop0_130_var = _loop0_130_rule(p)) // param_no_default* && - (_tmp_131_var = _tmp_131_rule(p)) + (_tmp_131_var = _tmp_131_rule(p)) // slash_with_default | param_with_default+ && - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); @@ -10814,9 +10814,9 @@ invalid_star_etc_rule(Parser *p) void *_tmp_132_var; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_132_var = _tmp_132_rule(p)) + (_tmp_132_var = _tmp_132_rule(p)) // ')' | ',' (')' | '**') ) { res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); @@ -10846,9 +10846,9 @@ invalid_lambda_star_etc_rule(Parser *p) void *_tmp_133_var; Token * literal; if ( - (literal = 
_PyPegen_expect_token(p, 16)) + (literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_133_var = _tmp_133_rule(p)) + (_tmp_133_var = _tmp_133_rule(p)) // ':' | ',' (':' | '**') ) { res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); @@ -10881,15 +10881,15 @@ invalid_double_type_comments_rule(Parser *p) Token * type_comment_var; Token * type_comment_var_1; if ( - (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (type_comment_var_1 = _PyPegen_expect_token(p, TYPE_COMMENT)) + (type_comment_var_1 = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && - (newline_var_1 = _PyPegen_expect_token(p, NEWLINE)) + (newline_var_1 = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (indent_var = _PyPegen_expect_token(p, INDENT)) + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); @@ -10926,7 +10926,7 @@ _loop0_1_rule(Parser *p) { // NEWLINE Token * newline_var; while ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = newline_var; @@ -10975,7 +10975,7 @@ _loop0_2_rule(Parser *p) { // NEWLINE Token * newline_var; while ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = newline_var; @@ -11025,9 +11025,9 @@ _loop0_4_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { res = elem; @@ -11074,9 +11074,9 @@ _gather_3_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = 
expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_4_rule(p)) + (seq = _loop0_4_rule(p)) // _loop0_4 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11110,9 +11110,9 @@ _loop0_6_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { res = elem; @@ -11159,9 +11159,9 @@ _gather_5_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_6_rule(p)) + (seq = _loop0_6_rule(p)) // _loop0_6 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11195,9 +11195,9 @@ _loop0_8_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { res = elem; @@ -11244,9 +11244,9 @@ _gather_7_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_8_rule(p)) + (seq = _loop0_8_rule(p)) // _loop0_8 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11280,9 +11280,9 @@ _loop0_10_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { res = elem; @@ -11329,9 +11329,9 @@ _gather_9_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_10_rule(p)) + (seq = _loop0_10_rule(p)) // _loop0_10 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11364,7 +11364,7 @@ _loop1_11_rule(Parser *p) { // statement asdl_seq* statement_var; while ( - (statement_var = 
statement_rule(p)) + (statement_var = statement_rule(p)) // statement ) { res = statement_var; @@ -11418,9 +11418,9 @@ _loop0_13_rule(Parser *p) stmt_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 13)) + (literal = _PyPegen_expect_token(p, 13)) // token=';' && - (elem = small_stmt_rule(p)) + (elem = small_stmt_rule(p)) // small_stmt ) { res = elem; @@ -11467,9 +11467,9 @@ _gather_12_rule(Parser *p) stmt_ty elem; asdl_seq * seq; if ( - (elem = small_stmt_rule(p)) + (elem = small_stmt_rule(p)) // small_stmt && - (seq = _loop0_13_rule(p)) + (seq = _loop0_13_rule(p)) // _loop0_13 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -11494,7 +11494,7 @@ _tmp_14_rule(Parser *p) { // 'import' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) + (keyword = _PyPegen_expect_token(p, 513)) // token='import' ) { res = keyword; @@ -11505,7 +11505,7 @@ _tmp_14_rule(Parser *p) { // 'from' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (keyword = _PyPegen_expect_token(p, 514)) // token='from' ) { res = keyword; @@ -11530,7 +11530,7 @@ _tmp_15_rule(Parser *p) { // 'def' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 522)) + (keyword = _PyPegen_expect_token(p, 522)) // token='def' ) { res = keyword; @@ -11541,7 +11541,7 @@ _tmp_15_rule(Parser *p) { // '@' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 49)) + (literal = _PyPegen_expect_token(p, 49)) // token='@' ) { res = literal; @@ -11552,7 +11552,7 @@ _tmp_15_rule(Parser *p) { // ASYNC Token * async_var; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { res = async_var; @@ -11577,7 +11577,7 @@ _tmp_16_rule(Parser *p) { // 'class' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 523)) + (keyword = _PyPegen_expect_token(p, 523)) // token='class' ) { res = keyword; @@ -11588,7 +11588,7 @@ _tmp_16_rule(Parser *p) { // '@' Token * literal; if ( - (literal = 
_PyPegen_expect_token(p, 49)) + (literal = _PyPegen_expect_token(p, 49)) // token='@' ) { res = literal; @@ -11613,7 +11613,7 @@ _tmp_17_rule(Parser *p) { // 'with' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 519)) + (keyword = _PyPegen_expect_token(p, 519)) // token='with' ) { res = keyword; @@ -11624,7 +11624,7 @@ _tmp_17_rule(Parser *p) { // ASYNC Token * async_var; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { res = async_var; @@ -11649,7 +11649,7 @@ _tmp_18_rule(Parser *p) { // 'for' Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 517)) + (keyword = _PyPegen_expect_token(p, 517)) // token='for' ) { res = keyword; @@ -11660,7 +11660,7 @@ _tmp_18_rule(Parser *p) { // ASYNC Token * async_var; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { res = async_var; @@ -11686,9 +11686,9 @@ _tmp_19_rule(Parser *p) expr_ty d; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (d = annotated_rhs_rule(p)) + (d = annotated_rhs_rule(p)) // annotated_rhs ) { res = d; @@ -11719,11 +11719,11 @@ _tmp_20_rule(Parser *p) Token * literal; Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = inside_paren_ann_assign_target_rule(p)) + (b = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = b; @@ -11738,7 +11738,7 @@ _tmp_20_rule(Parser *p) { // ann_assign_subscript_attribute_target expr_ty ann_assign_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) + (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // 
ann_assign_subscript_attribute_target ) { res = ann_assign_subscript_attribute_target_var; @@ -11764,9 +11764,9 @@ _tmp_21_rule(Parser *p) expr_ty d; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (d = annotated_rhs_rule(p)) + (d = annotated_rhs_rule(p)) // annotated_rhs ) { res = d; @@ -11803,7 +11803,7 @@ _loop1_22_rule(Parser *p) { // (star_targets '=') void *_tmp_134_var; while ( - (_tmp_134_var = _tmp_134_rule(p)) + (_tmp_134_var = _tmp_134_rule(p)) // star_targets '=' ) { res = _tmp_134_var; @@ -11848,7 +11848,7 @@ _tmp_23_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -11859,7 +11859,7 @@ _tmp_23_rule(Parser *p) { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -11884,7 +11884,7 @@ _tmp_24_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -11895,7 +11895,7 @@ _tmp_24_rule(Parser *p) { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -11929,9 +11929,9 @@ _loop0_26_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME ) { res = elem; @@ -11978,9 +11978,9 @@ _gather_25_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME && - (seq = 
_loop0_26_rule(p)) + (seq = _loop0_26_rule(p)) // _loop0_26 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12014,9 +12014,9 @@ _loop0_28_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME ) { res = elem; @@ -12063,9 +12063,9 @@ _gather_27_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME && - (seq = _loop0_28_rule(p)) + (seq = _loop0_28_rule(p)) // _loop0_28 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12091,9 +12091,9 @@ _tmp_29_rule(Parser *p) Token * literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { res = z; @@ -12130,7 +12130,7 @@ _loop0_30_rule(Parser *p) { // ('.' | '...') void *_tmp_135_var; while ( - (_tmp_135_var = _tmp_135_rule(p)) + (_tmp_135_var = _tmp_135_rule(p)) // '.' | '...' ) { res = _tmp_135_var; @@ -12179,7 +12179,7 @@ _loop1_31_rule(Parser *p) { // ('.' | '...') void *_tmp_136_var; while ( - (_tmp_136_var = _tmp_136_rule(p)) + (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' 
) { res = _tmp_136_var; @@ -12233,9 +12233,9 @@ _loop0_33_rule(Parser *p) alias_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = import_from_as_name_rule(p)) + (elem = import_from_as_name_rule(p)) // import_from_as_name ) { res = elem; @@ -12282,9 +12282,9 @@ _gather_32_rule(Parser *p) alias_ty elem; asdl_seq * seq; if ( - (elem = import_from_as_name_rule(p)) + (elem = import_from_as_name_rule(p)) // import_from_as_name && - (seq = _loop0_33_rule(p)) + (seq = _loop0_33_rule(p)) // _loop0_33 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12310,9 +12310,9 @@ _tmp_34_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = _PyPegen_name_token(p)) + (z = _PyPegen_name_token(p)) // NAME ) { res = z; @@ -12350,9 +12350,9 @@ _loop0_36_rule(Parser *p) alias_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = dotted_as_name_rule(p)) + (elem = dotted_as_name_rule(p)) // dotted_as_name ) { res = elem; @@ -12399,9 +12399,9 @@ _gather_35_rule(Parser *p) alias_ty elem; asdl_seq * seq; if ( - (elem = dotted_as_name_rule(p)) + (elem = dotted_as_name_rule(p)) // dotted_as_name && - (seq = _loop0_36_rule(p)) + (seq = _loop0_36_rule(p)) // _loop0_36 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12427,9 +12427,9 @@ _tmp_37_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = _PyPegen_name_token(p)) + (z = _PyPegen_name_token(p)) // NAME ) { res = z; @@ -12467,9 +12467,9 @@ _loop0_39_rule(Parser *p) withitem_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = 
with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { res = elem; @@ -12516,9 +12516,9 @@ _gather_38_rule(Parser *p) withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_39_rule(p)) + (seq = _loop0_39_rule(p)) // _loop0_39 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12552,9 +12552,9 @@ _loop0_41_rule(Parser *p) withitem_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { res = elem; @@ -12601,9 +12601,9 @@ _gather_40_rule(Parser *p) withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_41_rule(p)) + (seq = _loop0_41_rule(p)) // _loop0_41 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12637,9 +12637,9 @@ _loop0_43_rule(Parser *p) withitem_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { res = elem; @@ -12686,9 +12686,9 @@ _gather_42_rule(Parser *p) withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_43_rule(p)) + (seq = _loop0_43_rule(p)) // _loop0_43 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12722,9 +12722,9 @@ _loop0_45_rule(Parser *p) withitem_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { res = elem; @@ -12771,9 +12771,9 @@ _gather_44_rule(Parser *p) withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_45_rule(p)) + (seq = 
_loop0_45_rule(p)) // _loop0_45 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -12799,9 +12799,9 @@ _tmp_46_rule(Parser *p) Token * keyword; expr_ty t; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (t = target_rule(p)) + (t = target_rule(p)) // target ) { res = t; @@ -12838,7 +12838,7 @@ _loop1_47_rule(Parser *p) { // except_block excepthandler_ty except_block_var; while ( - (except_block_var = except_block_rule(p)) + (except_block_var = except_block_rule(p)) // except_block ) { res = except_block_var; @@ -12884,9 +12884,9 @@ _tmp_48_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = target_rule(p)) + (z = target_rule(p)) // target ) { res = z; @@ -12916,9 +12916,9 @@ _tmp_49_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { res = z; @@ -12948,9 +12948,9 @@ _tmp_50_rule(Parser *p) Token * literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) + (literal = _PyPegen_expect_token(p, 51)) // token='->' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { res = z; @@ -12980,9 +12980,9 @@ _tmp_51_rule(Parser *p) Token * literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) + (literal = _PyPegen_expect_token(p, 51)) // token='->' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { res = z; @@ -13012,9 +13012,9 @@ _tmp_52_rule(Parser *p) Token * indent_var; Token * newline_var; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (indent_var = _PyPegen_expect_token(p, INDENT)) + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { res = 
_PyPegen_dummy_name(p, newline_var, indent_var); @@ -13047,7 +13047,7 @@ _loop0_53_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13096,7 +13096,7 @@ _loop0_54_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13145,7 +13145,7 @@ _loop0_55_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13194,7 +13194,7 @@ _loop1_56_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13247,7 +13247,7 @@ _loop0_57_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13296,7 +13296,7 @@ _loop1_58_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13349,7 +13349,7 @@ _loop1_59_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = 
param_no_default_var; @@ -13402,7 +13402,7 @@ _loop1_60_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13455,7 +13455,7 @@ _loop0_61_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13504,7 +13504,7 @@ _loop1_62_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13557,7 +13557,7 @@ _loop0_63_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -13606,7 +13606,7 @@ _loop1_64_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -13659,7 +13659,7 @@ _loop0_65_rule(Parser *p) { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( - (param_maybe_default_var = param_maybe_default_rule(p)) + (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default ) { res = param_maybe_default_var; @@ -13708,7 +13708,7 @@ _loop1_66_rule(Parser *p) { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( - (param_maybe_default_var = param_maybe_default_rule(p)) + (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default ) { res = param_maybe_default_var; @@ 
-13761,7 +13761,7 @@ _loop1_67_rule(Parser *p) { // ('@' named_expression NEWLINE) void *_tmp_137_var; while ( - (_tmp_137_var = _tmp_137_rule(p)) + (_tmp_137_var = _tmp_137_rule(p)) // '@' named_expression NEWLINE ) { res = _tmp_137_var; @@ -13808,11 +13808,11 @@ _tmp_68_rule(Parser *p) Token * literal_1; void *z; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' && - (z = arguments_rule(p), 1) + (z = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { res = z; @@ -13850,9 +13850,9 @@ _loop0_70_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = star_expression_rule(p)) + (elem = star_expression_rule(p)) // star_expression ) { res = elem; @@ -13899,9 +13899,9 @@ _gather_69_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = star_expression_rule(p)) + (elem = star_expression_rule(p)) // star_expression && - (seq = _loop0_70_rule(p)) + (seq = _loop0_70_rule(p)) // _loop0_70 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -13934,7 +13934,7 @@ _loop1_71_rule(Parser *p) { // (',' star_expression) void *_tmp_138_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) + (_tmp_138_var = _tmp_138_rule(p)) // ',' star_expression ) { res = _tmp_138_var; @@ -13988,9 +13988,9 @@ _loop0_73_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = star_named_expression_rule(p)) + (elem = star_named_expression_rule(p)) // star_named_expression ) { res = elem; @@ -14037,9 +14037,9 @@ _gather_72_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = star_named_expression_rule(p)) + (elem = star_named_expression_rule(p)) // star_named_expression && - (seq = _loop0_73_rule(p)) + (seq = _loop0_73_rule(p)) 
// _loop0_73 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -14072,7 +14072,7 @@ _loop1_74_rule(Parser *p) { // (',' expression) void *_tmp_139_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) + (_tmp_139_var = _tmp_139_rule(p)) // ',' expression ) { res = _tmp_139_var; @@ -14125,7 +14125,7 @@ _loop0_75_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14174,7 +14174,7 @@ _loop0_76_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14223,7 +14223,7 @@ _loop0_77_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14272,7 +14272,7 @@ _loop1_78_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14325,7 +14325,7 @@ _loop0_79_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14374,7 +14374,7 @@ 
_loop1_80_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14427,7 +14427,7 @@ _loop1_81_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14480,7 +14480,7 @@ _loop1_82_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14533,7 +14533,7 @@ _loop0_83_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14582,7 +14582,7 @@ _loop1_84_rule(Parser *p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14635,7 +14635,7 @@ _loop0_85_rule(Parser *p) { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { res = lambda_param_no_default_var; @@ -14684,7 +14684,7 @@ _loop1_86_rule(Parser 
*p) { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { res = lambda_param_with_default_var; @@ -14737,7 +14737,7 @@ _loop0_87_rule(Parser *p) { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { res = lambda_param_maybe_default_var; @@ -14786,7 +14786,7 @@ _loop1_88_rule(Parser *p) { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { res = lambda_param_maybe_default_var; @@ -14839,7 +14839,7 @@ _loop1_89_rule(Parser *p) { // ('or' conjunction) void *_tmp_140_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) + (_tmp_140_var = _tmp_140_rule(p)) // 'or' conjunction ) { res = _tmp_140_var; @@ -14892,7 +14892,7 @@ _loop1_90_rule(Parser *p) { // ('and' inversion) void *_tmp_141_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) + (_tmp_141_var = _tmp_141_rule(p)) // 'and' inversion ) { res = _tmp_141_var; @@ -14945,7 +14945,7 @@ _loop1_91_rule(Parser *p) { // compare_op_bitwise_or_pair CmpopExprPair* compare_op_bitwise_or_pair_var; while ( - (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) + (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair ) { res = compare_op_bitwise_or_pair_var; @@ -14990,7 +14990,7 @@ _tmp_92_rule(Parser *p) { // '!=' Token * tok; if ( - (tok = _PyPegen_expect_token(p, 28)) + (tok = _PyPegen_expect_token(p, 28)) // token='!=' ) { res = 
_PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; @@ -15028,9 +15028,9 @@ _loop0_94_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = slice_rule(p)) + (elem = slice_rule(p)) // slice ) { res = elem; @@ -15077,9 +15077,9 @@ _gather_93_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = slice_rule(p)) + (elem = slice_rule(p)) // slice && - (seq = _loop0_94_rule(p)) + (seq = _loop0_94_rule(p)) // _loop0_94 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15105,9 +15105,9 @@ _tmp_95_rule(Parser *p) void *d; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' && - (d = expression_rule(p), 1) + (d = expression_rule(p), 1) // expression? ) { res = d; @@ -15136,7 +15136,7 @@ _tmp_96_rule(Parser *p) { // tuple expr_ty tuple_var; if ( - (tuple_var = tuple_rule(p)) + (tuple_var = tuple_rule(p)) // tuple ) { res = tuple_var; @@ -15147,7 +15147,7 @@ _tmp_96_rule(Parser *p) { // group expr_ty group_var; if ( - (group_var = group_rule(p)) + (group_var = group_rule(p)) // group ) { res = group_var; @@ -15158,7 +15158,7 @@ _tmp_96_rule(Parser *p) { // genexp expr_ty genexp_var; if ( - (genexp_var = genexp_rule(p)) + (genexp_var = genexp_rule(p)) // genexp ) { res = genexp_var; @@ -15183,7 +15183,7 @@ _tmp_97_rule(Parser *p) { // list expr_ty list_var; if ( - (list_var = list_rule(p)) + (list_var = list_rule(p)) // list ) { res = list_var; @@ -15194,7 +15194,7 @@ _tmp_97_rule(Parser *p) { // listcomp expr_ty listcomp_var; if ( - (listcomp_var = listcomp_rule(p)) + (listcomp_var = listcomp_rule(p)) // listcomp ) { res = listcomp_var; @@ -15219,7 +15219,7 @@ _tmp_98_rule(Parser *p) { // dict expr_ty dict_var; if ( - (dict_var = dict_rule(p)) + (dict_var = dict_rule(p)) // dict ) { res = dict_var; @@ -15230,7 +15230,7 @@ _tmp_98_rule(Parser *p) { // set expr_ty set_var; if ( - (set_var 
= set_rule(p)) + (set_var = set_rule(p)) // set ) { res = set_var; @@ -15241,7 +15241,7 @@ _tmp_98_rule(Parser *p) { // dictcomp expr_ty dictcomp_var; if ( - (dictcomp_var = dictcomp_rule(p)) + (dictcomp_var = dictcomp_rule(p)) // dictcomp ) { res = dictcomp_var; @@ -15252,7 +15252,7 @@ _tmp_98_rule(Parser *p) { // setcomp expr_ty setcomp_var; if ( - (setcomp_var = setcomp_rule(p)) + (setcomp_var = setcomp_rule(p)) // setcomp ) { res = setcomp_var; @@ -15285,7 +15285,7 @@ _loop1_99_rule(Parser *p) { // STRING expr_ty string_var; while ( - (string_var = _PyPegen_string_token(p)) + (string_var = _PyPegen_string_token(p)) // STRING ) { res = string_var; @@ -15332,11 +15332,11 @@ _tmp_100_rule(Parser *p) expr_ty y; void *z; if ( - (y = star_named_expression_rule(p)) + (y = star_named_expression_rule(p)) // star_named_expression && - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (z = star_named_expressions_rule(p), 1) + (z = star_named_expressions_rule(p), 1) // star_named_expressions? 
) { res = _PyPegen_seq_insert_in_front ( p , y , z ); @@ -15365,7 +15365,7 @@ _tmp_101_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -15376,7 +15376,7 @@ _tmp_101_rule(Parser *p) { // named_expression expr_ty named_expression_var; if ( - (named_expression_var = named_expression_rule(p)) + (named_expression_var = named_expression_rule(p)) // named_expression ) { res = named_expression_var; @@ -15410,9 +15410,9 @@ _loop0_103_rule(Parser *p) KeyValuePair* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kvpair_rule(p)) + (elem = kvpair_rule(p)) // kvpair ) { res = elem; @@ -15459,9 +15459,9 @@ _gather_102_rule(Parser *p) KeyValuePair* elem; asdl_seq * seq; if ( - (elem = kvpair_rule(p)) + (elem = kvpair_rule(p)) // kvpair && - (seq = _loop0_103_rule(p)) + (seq = _loop0_103_rule(p)) // _loop0_103 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15494,7 +15494,7 @@ _loop1_104_rule(Parser *p) { // for_if_clause comprehension_ty for_if_clause_var; while ( - (for_if_clause_var = for_if_clause_rule(p)) + (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause ) { res = for_if_clause_var; @@ -15547,7 +15547,7 @@ _loop0_105_rule(Parser *p) { // ('if' disjunction) void *_tmp_142_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) + (_tmp_142_var = _tmp_142_rule(p)) // 'if' disjunction ) { res = _tmp_142_var; @@ -15596,7 +15596,7 @@ _loop0_106_rule(Parser *p) { // ('if' disjunction) void *_tmp_143_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) + (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction ) { res = _tmp_143_var; @@ -15638,9 +15638,9 @@ _tmp_107_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = args_rule(p)) + (c = 
args_rule(p)) // args ) { res = c; @@ -15670,9 +15670,9 @@ _tmp_108_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = args_rule(p)) + (c = args_rule(p)) // args ) { res = c; @@ -15710,9 +15710,9 @@ _loop0_110_rule(Parser *p) KeywordOrStarred* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { res = elem; @@ -15759,9 +15759,9 @@ _gather_109_rule(Parser *p) KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_110_rule(p)) + (seq = _loop0_110_rule(p)) // _loop0_110 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15795,9 +15795,9 @@ _loop0_112_rule(Parser *p) KeywordOrStarred* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { res = elem; @@ -15844,9 +15844,9 @@ _gather_111_rule(Parser *p) KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_112_rule(p)) + (seq = _loop0_112_rule(p)) // _loop0_112 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15880,9 +15880,9 @@ _loop0_114_rule(Parser *p) KeywordOrStarred* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { res = elem; @@ -15929,9 +15929,9 @@ _gather_113_rule(Parser *p) KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = 
kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_114_rule(p)) + (seq = _loop0_114_rule(p)) // _loop0_114 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -15965,9 +15965,9 @@ _loop0_116_rule(Parser *p) KeywordOrStarred* elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { res = elem; @@ -16014,9 +16014,9 @@ _gather_115_rule(Parser *p) KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_116_rule(p)) + (seq = _loop0_116_rule(p)) // _loop0_116 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -16049,7 +16049,7 @@ _loop0_117_rule(Parser *p) { // (',' star_target) void *_tmp_144_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) + (_tmp_144_var = _tmp_144_rule(p)) // ',' star_target ) { res = _tmp_144_var; @@ -16099,9 +16099,9 @@ _loop0_119_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = star_target_rule(p)) + (elem = star_target_rule(p)) // star_target ) { res = elem; @@ -16148,9 +16148,9 @@ _gather_118_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = star_target_rule(p)) + (elem = star_target_rule(p)) // star_target && - (seq = _loop0_119_rule(p)) + (seq = _loop0_119_rule(p)) // _loop0_119 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -16175,9 +16175,9 @@ _tmp_120_rule(Parser *p) { // !'*' star_target expr_ty star_target_var; if ( - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' && - (star_target_var = star_target_rule(p)) + (star_target_var = 
star_target_rule(p)) // star_target ) { res = star_target_var; @@ -16211,9 +16211,9 @@ _loop0_122_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = del_target_rule(p)) + (elem = del_target_rule(p)) // del_target ) { res = elem; @@ -16260,9 +16260,9 @@ _gather_121_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = del_target_rule(p)) + (elem = del_target_rule(p)) // del_target && - (seq = _loop0_122_rule(p)) + (seq = _loop0_122_rule(p)) // _loop0_122 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -16296,9 +16296,9 @@ _loop0_124_rule(Parser *p) expr_ty elem; Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = target_rule(p)) + (elem = target_rule(p)) // target ) { res = elem; @@ -16345,9 +16345,9 @@ _gather_123_rule(Parser *p) expr_ty elem; asdl_seq * seq; if ( - (elem = target_rule(p)) + (elem = target_rule(p)) // target && - (seq = _loop0_124_rule(p)) + (seq = _loop0_124_rule(p)) // _loop0_124 ) { res = _PyPegen_seq_insert_in_front(p, elem, seq); @@ -16372,7 +16372,7 @@ _tmp_125_rule(Parser *p) { // args expr_ty args_var; if ( - (args_var = args_rule(p)) + (args_var = args_rule(p)) // args ) { res = args_var; @@ -16384,9 +16384,9 @@ _tmp_125_rule(Parser *p) expr_ty expression_var; asdl_seq* for_if_clauses_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (for_if_clauses_var = for_if_clauses_rule(p)) + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); @@ -16412,9 +16412,9 @@ _tmp_126_rule(Parser *p) expr_ty annotated_rhs_var; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' && - (annotated_rhs_var = annotated_rhs_rule(p)) + 
(annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs ) { res = _PyPegen_dummy_name(p, literal, annotated_rhs_var); @@ -16439,7 +16439,7 @@ _tmp_127_rule(Parser *p) { // '=' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' ) { res = literal; @@ -16450,7 +16450,7 @@ _tmp_127_rule(Parser *p) { // augassign AugOperator* augassign_var; if ( - (augassign_var = augassign_rule(p)) + (augassign_var = augassign_rule(p)) // augassign ) { res = augassign_var; @@ -16475,7 +16475,7 @@ _tmp_128_rule(Parser *p) { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { res = yield_expr_var; @@ -16486,7 +16486,7 @@ _tmp_128_rule(Parser *p) { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { res = star_expressions_var; @@ -16511,7 +16511,7 @@ _tmp_129_rule(Parser *p) { // '[' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 9)) + (literal = _PyPegen_expect_token(p, 9)) // token='[' ) { res = literal; @@ -16522,7 +16522,7 @@ _tmp_129_rule(Parser *p) { // '(' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 7)) + (literal = _PyPegen_expect_token(p, 7)) // token='(' ) { res = literal; @@ -16533,7 +16533,7 @@ _tmp_129_rule(Parser *p) { // '{' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 25)) + (literal = _PyPegen_expect_token(p, 25)) // token='{' ) { res = literal; @@ -16566,7 +16566,7 @@ _loop0_130_rule(Parser *p) { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { res = param_no_default_var; @@ -16607,7 +16607,7 @@ _tmp_131_rule(Parser *p) { // slash_with_default SlashWithDefault* slash_with_default_var; if ( - 
(slash_with_default_var = slash_with_default_rule(p)) + (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { res = slash_with_default_var; @@ -16618,7 +16618,7 @@ _tmp_131_rule(Parser *p) { // param_with_default+ asdl_seq * _loop1_145_var; if ( - (_loop1_145_var = _loop1_145_rule(p)) + (_loop1_145_var = _loop1_145_rule(p)) // param_with_default+ ) { res = _loop1_145_var; @@ -16643,7 +16643,7 @@ _tmp_132_rule(Parser *p) { // ')' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 8)) + (literal = _PyPegen_expect_token(p, 8)) // token=')' ) { res = literal; @@ -16655,9 +16655,9 @@ _tmp_132_rule(Parser *p) void *_tmp_146_var; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_146_var = _tmp_146_rule(p)) + (_tmp_146_var = _tmp_146_rule(p)) // ')' | '**' ) { res = _PyPegen_dummy_name(p, literal, _tmp_146_var); @@ -16682,7 +16682,7 @@ _tmp_133_rule(Parser *p) { // ':' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' ) { res = literal; @@ -16694,9 +16694,9 @@ _tmp_133_rule(Parser *p) void *_tmp_147_var; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_147_var = _tmp_147_rule(p)) + (_tmp_147_var = _tmp_147_rule(p)) // ':' | '**' ) { res = _PyPegen_dummy_name(p, literal, _tmp_147_var); @@ -16722,9 +16722,9 @@ _tmp_134_rule(Parser *p) Token * literal; expr_ty z; if ( - (z = star_targets_rule(p)) + (z = star_targets_rule(p)) // star_targets && - (literal = _PyPegen_expect_token(p, 22)) + (literal = _PyPegen_expect_token(p, 22)) // token='=' ) { res = z; @@ -16753,7 +16753,7 @@ _tmp_135_rule(Parser *p) { // '.' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' ) { res = literal; @@ -16764,7 +16764,7 @@ _tmp_135_rule(Parser *p) { // '...' 
Token * literal; if ( - (literal = _PyPegen_expect_token(p, 52)) + (literal = _PyPegen_expect_token(p, 52)) // token='...' ) { res = literal; @@ -16789,7 +16789,7 @@ _tmp_136_rule(Parser *p) { // '.' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 23)) + (literal = _PyPegen_expect_token(p, 23)) // token='.' ) { res = literal; @@ -16800,7 +16800,7 @@ _tmp_136_rule(Parser *p) { // '...' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 52)) + (literal = _PyPegen_expect_token(p, 52)) // token='...' ) { res = literal; @@ -16827,11 +16827,11 @@ _tmp_137_rule(Parser *p) Token * literal; Token * newline_var; if ( - (literal = _PyPegen_expect_token(p, 49)) + (literal = _PyPegen_expect_token(p, 49)) // token='@' && - (f = named_expression_rule(p)) + (f = named_expression_rule(p)) // named_expression && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { res = f; @@ -16861,9 +16861,9 @@ _tmp_138_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = star_expression_rule(p)) + (c = star_expression_rule(p)) // star_expression ) { res = c; @@ -16893,9 +16893,9 @@ _tmp_139_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = expression_rule(p)) + (c = expression_rule(p)) // expression ) { res = c; @@ -16925,9 +16925,9 @@ _tmp_140_rule(Parser *p) expr_ty c; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 532)) + (keyword = _PyPegen_expect_token(p, 532)) // token='or' && - (c = conjunction_rule(p)) + (c = conjunction_rule(p)) // conjunction ) { res = c; @@ -16957,9 +16957,9 @@ _tmp_141_rule(Parser *p) expr_ty c; Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 533)) + (keyword = _PyPegen_expect_token(p, 533)) // token='and' && - (c = inversion_rule(p)) + (c = 
inversion_rule(p)) // inversion ) { res = c; @@ -16989,9 +16989,9 @@ _tmp_142_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (z = disjunction_rule(p)) + (z = disjunction_rule(p)) // disjunction ) { res = z; @@ -17021,9 +17021,9 @@ _tmp_143_rule(Parser *p) Token * keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (z = disjunction_rule(p)) + (z = disjunction_rule(p)) // disjunction ) { res = z; @@ -17053,9 +17053,9 @@ _tmp_144_rule(Parser *p) expr_ty c; Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = star_target_rule(p)) + (c = star_target_rule(p)) // star_target ) { res = c; @@ -17092,7 +17092,7 @@ _loop1_145_rule(Parser *p) { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { res = param_with_default_var; @@ -17137,7 +17137,7 @@ _tmp_146_rule(Parser *p) { // ')' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 8)) + (literal = _PyPegen_expect_token(p, 8)) // token=')' ) { res = literal; @@ -17148,7 +17148,7 @@ _tmp_146_rule(Parser *p) { // '**' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' ) { res = literal; @@ -17173,7 +17173,7 @@ _tmp_147_rule(Parser *p) { // ':' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (literal = _PyPegen_expect_token(p, 11)) // token=':' ) { res = literal; @@ -17184,7 +17184,7 @@ _tmp_147_rule(Parser *p) { // '**' Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (literal = _PyPegen_expect_token(p, 35)) // token='**' ) { res = literal; diff --git a/Tools/peg_generator/pegen/c_generator.py 
b/Tools/peg_generator/pegen/c_generator.py index 40004e7875278d..3bf6d9ed6a3abb 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -68,6 +68,7 @@ class FunctionCall: return_type: Optional[str] = None nodetype: Optional[NodeTypes] = None force_true: bool = False + comment: Optional[str] = None def __str__(self) -> str: parts = [] @@ -78,6 +79,8 @@ def __str__(self) -> str: parts.append(", 1") if self.assigned_variable: parts = ["(", self.assigned_variable, " = ", *parts, ")"] + if self.comment: + parts.append(f" // {self.comment}") return "".join(parts) @@ -103,6 +106,7 @@ def keyword_helper(self, keyword: str) -> FunctionCall: arguments=["p", self.keyword_cache[keyword]], return_type="Token *", nodetype=NodeTypes.KEYWORD, + comment=f"token='{keyword}'", ) def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: @@ -115,6 +119,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: arguments=["p"], nodetype=BASE_NODETYPES[name], return_type="expr_ty", + comment=name, ) return FunctionCall( assigned_variable=f"{name.lower()}_var", @@ -122,6 +127,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: arguments=["p", name], nodetype=NodeTypes.GENERIC_TOKEN, return_type="Token *", + comment=f"token='{name}'", ) type = None @@ -134,6 +140,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type=type, + comment=f"{node}" ) def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: @@ -149,6 +156,7 @@ def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: arguments=["p", type], nodetype=NodeTypes.GENERIC_TOKEN, return_type="Token *", + comment=f"token='{val}'", ) def visit_Rhs(self, node: Rhs) -> FunctionCall: @@ -168,6 +176,7 @@ def can_we_inline(node: Rhs) -> int: name = self.gen.name_node(node) self.cache[node] = FunctionCall( assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], + comment=f"{node}" ) return 
self.cache[node] @@ -190,6 +199,7 @@ def lookahead_call_helper(self, node: Lookahead, positive: int) -> FunctionCall: function=f"_PyPegen_lookahead_with_int", arguments=[positive, call.function, *call.arguments], return_type="int", + comment=f"token={node.node}", ) else: return FunctionCall( @@ -211,6 +221,7 @@ def visit_Opt(self, node: Opt) -> FunctionCall: function=call.function, arguments=call.arguments, force_true=True, + comment=f"{node}" ) def visit_Repeat0(self, node: Repeat0) -> FunctionCall: @@ -222,6 +233,7 @@ def visit_Repeat0(self, node: Repeat0) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type="asdl_seq *", + comment=f"{node}", ) return self.cache[node] @@ -234,6 +246,7 @@ def visit_Repeat1(self, node: Repeat1) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type="asdl_seq *", + comment=f"{node}", ) return self.cache[node] @@ -246,6 +259,7 @@ def visit_Gather(self, node: Gather) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type="asdl_seq *", + comment=f"{node}", ) return self.cache[node] From c1c7d8ead9eb214a6149a43e31a3213c52448877 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Thu, 7 May 2020 04:09:33 +0300 Subject: [PATCH 032/115] bpo-40397: Refactor typing._GenericAlias (GH-19719) Make the design more object-oriented. Split _GenericAlias on two almost independent classes: for special generic aliases like List and for parametrized generic aliases like List[int]. Add specialized subclasses for Callable, Callable[...], Tuple and Union[...]. 
--- Lib/typing.py | 389 +++++++++++++++++++++++++------------------------- 1 file changed, 198 insertions(+), 191 deletions(-) diff --git a/Lib/typing.py b/Lib/typing.py index f3cd280a09e271..681ab6d21e0a32 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -181,34 +181,11 @@ def _collect_type_vars(types): for t in types: if isinstance(t, TypeVar) and t not in tvars: tvars.append(t) - if ((isinstance(t, _GenericAlias) and not t._special) - or isinstance(t, GenericAlias)): + if isinstance(t, (_GenericAlias, GenericAlias)): tvars.extend([t for t in t.__parameters__ if t not in tvars]) return tuple(tvars) -def _subs_tvars(tp, tvars, subs): - """Substitute type variables 'tvars' with substitutions 'subs'. - These two must have the same length. - """ - if not isinstance(tp, (_GenericAlias, GenericAlias)): - return tp - new_args = list(tp.__args__) - for a, arg in enumerate(tp.__args__): - if isinstance(arg, TypeVar): - for i, tvar in enumerate(tvars): - if arg == tvar: - new_args[a] = subs[i] - else: - new_args[a] = _subs_tvars(arg, tvars, subs) - if tp.__origin__ is Union: - return Union[tuple(new_args)] - if isinstance(tp, GenericAlias): - return GenericAlias(tp.__origin__, tuple(new_args)) - else: - return tp.copy_with(tuple(new_args)) - - def _check_generic(cls, parameters): """Check correct count for parameters of a generic cls (internal helper). This gives a nice error message in case of count mismatch. @@ -229,7 +206,7 @@ def _remove_dups_flatten(parameters): # Flatten out Union[Union[...], ...]. 
params = [] for p in parameters: - if isinstance(p, _GenericAlias) and p.__origin__ is Union: + if isinstance(p, _UnionGenericAlias): params.extend(p.__args__) elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: params.extend(p[1:]) @@ -274,18 +251,14 @@ def _eval_type(t, globalns, localns): """ if isinstance(t, ForwardRef): return t._evaluate(globalns, localns) - if isinstance(t, _GenericAlias): + if isinstance(t, (_GenericAlias, GenericAlias)): ev_args = tuple(_eval_type(a, globalns, localns) for a in t.__args__) if ev_args == t.__args__: return t - res = t.copy_with(ev_args) - res._special = t._special - return res - if isinstance(t, GenericAlias): - ev_args = tuple(_eval_type(a, globalns, localns) for a in t.__args__) - if ev_args == t.__args__: - return t - return GenericAlias(t.__origin__, ev_args) + if isinstance(t, GenericAlias): + return GenericAlias(t.__origin__, ev_args) + else: + return t.copy_with(ev_args) return t @@ -300,6 +273,7 @@ def __init_subclass__(self, /, *args, **kwds): class _Immutable: """Mixin to indicate that object should not be copied.""" + __slots__ = () def __copy__(self): return self @@ -446,7 +420,7 @@ def Union(self, parameters): parameters = _remove_dups_flatten(parameters) if len(parameters) == 1: return parameters[0] - return _GenericAlias(self, parameters) + return _UnionGenericAlias(self, parameters) @_SpecialForm def Optional(self, parameters): @@ -579,7 +553,7 @@ def longest(x: A, y: A) -> A: """ __slots__ = ('__name__', '__bound__', '__constraints__', - '__covariant__', '__contravariant__') + '__covariant__', '__contravariant__', '__dict__') def __init__(self, name, *constraints, bound=None, covariant=False, contravariant=False): @@ -629,23 +603,10 @@ def __reduce__(self): # e.g., Dict[T, int].__args__ == (T, int). -# Mapping from non-generic type names that have a generic alias in typing -# but with a different name. 
-_normalize_alias = {'list': 'List', - 'tuple': 'Tuple', - 'dict': 'Dict', - 'set': 'Set', - 'frozenset': 'FrozenSet', - 'deque': 'Deque', - 'defaultdict': 'DefaultDict', - 'type': 'Type', - 'Set': 'AbstractSet'} - def _is_dunder(attr): return attr.startswith('__') and attr.endswith('__') - -class _GenericAlias(_Final, _root=True): +class _BaseGenericAlias(_Final, _root=True): """The central part of internal API. This represents a generic version of type 'origin' with type arguments 'params'. @@ -654,12 +615,8 @@ class _GenericAlias(_Final, _root=True): have 'name' always set. If 'inst' is False, then the alias can't be instantiated, this is used by e.g. typing.List and typing.Dict. """ - def __init__(self, origin, params, *, inst=True, special=False, name=None): + def __init__(self, origin, params, *, inst=True, name=None): self._inst = inst - self._special = special - if special and name is None: - orig_name = origin.__name__ - name = _normalize_alias.get(orig_name, orig_name) self._name = name if not isinstance(params, tuple): params = (params,) @@ -671,68 +628,20 @@ def __init__(self, origin, params, *, inst=True, special=False, name=None): self.__slots__ = None # This is not documented. if not name: self.__module__ = origin.__module__ - if special: - self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}' - - @_tp_cache - def __getitem__(self, params): - if self.__origin__ in (Generic, Protocol): - # Can't subscript Generic[...] or Protocol[...]. - raise TypeError(f"Cannot subscript already-subscripted {self}") - if not isinstance(params, tuple): - params = (params,) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - _check_generic(self, params) - return _subs_tvars(self, self.__parameters__, params) - - def copy_with(self, params): - # We don't copy self._special. 
- return _GenericAlias(self.__origin__, params, name=self._name, inst=self._inst) - - def __repr__(self): - if (self.__origin__ == Union and len(self.__args__) == 2 - and type(None) in self.__args__): - if self.__args__[0] is not type(None): - arg = self.__args__[0] - else: - arg = self.__args__[1] - return (f'typing.Optional[{_type_repr(arg)}]') - if (self._name != 'Callable' or - len(self.__args__) == 2 and self.__args__[0] is Ellipsis): - if self._name: - name = 'typing.' + self._name - else: - name = _type_repr(self.__origin__) - if not self._special: - args = f'[{", ".join([_type_repr(a) for a in self.__args__])}]' - else: - args = '' - return (f'{name}{args}') - if self._special: - return 'typing.Callable' - return (f'typing.Callable' - f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], ' - f'{_type_repr(self.__args__[-1])}]') def __eq__(self, other): - if not isinstance(other, _GenericAlias): + if not isinstance(other, _BaseGenericAlias): return NotImplemented - if self.__origin__ != other.__origin__: - return False - if self.__origin__ is Union and other.__origin__ is Union: - return frozenset(self.__args__) == frozenset(other.__args__) - return self.__args__ == other.__args__ + return (self.__origin__ == other.__origin__ + and self.__args__ == other.__args__) def __hash__(self): - if self.__origin__ is Union: - return hash((Union, frozenset(self.__args__))) return hash((self.__origin__, self.__args__)) def __call__(self, *args, **kwargs): if not self._inst: raise TypeError(f"Type {self._name} cannot be instantiated; " - f"use {self._name.lower()}() instead") + f"use {self.__origin__.__name__}() instead") result = self.__origin__(*args, **kwargs) try: result.__orig_class__ = self @@ -741,23 +650,16 @@ def __call__(self, *args, **kwargs): return result def __mro_entries__(self, bases): - if self._name: # generic version of an ABC or built-in class - res = [] - if self.__origin__ not in bases: - res.append(self.__origin__) - i = bases.index(self) 
- if not any(isinstance(b, _GenericAlias) or issubclass(b, Generic) - for b in bases[i+1:]): - res.append(Generic) - return tuple(res) - if self.__origin__ is Generic: - if Protocol in bases: - return () - i = bases.index(self) - for b in bases[i+1:]: - if isinstance(b, _GenericAlias) and b is not self: - return () - return (self.__origin__,) + res = [] + if self.__origin__ not in bases: + res.append(self.__origin__) + i = bases.index(self) + for b in bases[i+1:]: + if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic): + break + else: + res.append(Generic) + return tuple(res) def __getattr__(self, attr): # We are careful for copy and pickle. @@ -767,7 +669,7 @@ def __getattr__(self, attr): raise AttributeError(attr) def __setattr__(self, attr, val): - if _is_dunder(attr) or attr in ('_name', '_inst', '_special'): + if _is_dunder(attr) or attr in ('_name', '_inst'): super().__setattr__(attr, val) else: setattr(self.__origin__, attr, val) @@ -776,39 +678,124 @@ def __instancecheck__(self, obj): return self.__subclasscheck__(type(obj)) def __subclasscheck__(self, cls): - if self._special: - if not isinstance(cls, _GenericAlias): - return issubclass(cls, self.__origin__) - if cls._special: - return issubclass(cls.__origin__, self.__origin__) raise TypeError("Subscripted generics cannot be used with" " class and instance checks") - def __reduce__(self): - if self._special: - return self._name +class _GenericAlias(_BaseGenericAlias, _root=True): + @_tp_cache + def __getitem__(self, params): + if self.__origin__ in (Generic, Protocol): + # Can't subscript Generic[...] or Protocol[...]. + raise TypeError(f"Cannot subscript already-subscripted {self}") + if not isinstance(params, tuple): + params = (params,) + msg = "Parameters to generic types must be types." 
+ params = tuple(_type_check(p, msg) for p in params) + _check_generic(self, params) + + subst = dict(zip(self.__parameters__, params)) + new_args = [] + for arg in self.__args__: + if isinstance(arg, TypeVar): + arg = subst[arg] + elif isinstance(arg, (_BaseGenericAlias, GenericAlias)): + subargs = tuple(subst[x] for x in arg.__parameters__) + arg = arg[subargs] + new_args.append(arg) + return self.copy_with(tuple(new_args)) + + def copy_with(self, params): + return self.__class__(self.__origin__, params, name=self._name, inst=self._inst) + + def __repr__(self): + if self._name: + name = 'typing.' + self._name + else: + name = _type_repr(self.__origin__) + args = ", ".join([_type_repr(a) for a in self.__args__]) + return f'{name}[{args}]' + + def __reduce__(self): if self._name: origin = globals()[self._name] else: origin = self.__origin__ - if (origin is Callable and - not (len(self.__args__) == 2 and self.__args__[0] is Ellipsis)): - args = list(self.__args__[:-1]), self.__args__[-1] - else: - args = tuple(self.__args__) - if len(args) == 1 and not isinstance(args[0], tuple): - args, = args + args = tuple(self.__args__) + if len(args) == 1 and not isinstance(args[0], tuple): + args, = args return operator.getitem, (origin, args) + def __mro_entries__(self, bases): + if self._name: # generic version of an ABC or built-in class + return super().__mro_entries__(bases) + if self.__origin__ is Generic: + if Protocol in bases: + return () + i = bases.index(self) + for b in bases[i+1:]: + if isinstance(b, _BaseGenericAlias) and b is not self: + return () + return (self.__origin__,) + + +class _SpecialGenericAlias(_BaseGenericAlias, _root=True): + def __init__(self, origin, params, *, inst=True, name=None): + if name is None: + name = origin.__name__ + super().__init__(origin, params, inst=inst, name=name) + self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}' + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): 
+ params = (params,) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + _check_generic(self, params) + assert self.__args__ == self.__parameters__ + return self.copy_with(params) + + def copy_with(self, params): + return _GenericAlias(self.__origin__, params, + name=self._name, inst=self._inst) + + def __repr__(self): + return 'typing.' + self._name + + def __subclasscheck__(self, cls): + if isinstance(cls, _SpecialGenericAlias): + return issubclass(cls.__origin__, self.__origin__) + if not isinstance(cls, _GenericAlias): + return issubclass(cls, self.__origin__) + return super().__subclasscheck__(cls) + + def __reduce__(self): + return self._name + + +class _CallableGenericAlias(_GenericAlias, _root=True): + def __repr__(self): + assert self._name == 'Callable' + if len(self.__args__) == 2 and self.__args__[0] is Ellipsis: + return super().__repr__() + return (f'typing.Callable' + f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], ' + f'{_type_repr(self.__args__[-1])}]') + + def __reduce__(self): + args = self.__args__ + if not (len(args) == 2 and args[0] is ...): + args = list(args[:-1]), args[-1] + return operator.getitem, (Callable, args) + + +class _CallableType(_SpecialGenericAlias, _root=True): + def copy_with(self, params): + return _CallableGenericAlias(self.__origin__, params, + name=self._name, inst=self._inst) -class _VariadicGenericAlias(_GenericAlias, _root=True): - """Same as _GenericAlias above but for variadic aliases. Currently, - this is used only by special internal aliases: Tuple and Callable. 
- """ def __getitem__(self, params): - if self._name != 'Callable' or not self._special: - return self.__getitem_inner__(params) if not isinstance(params, tuple) or len(params) != 2: raise TypeError("Callable must be used as " "Callable[[arg, ...], result].") @@ -824,29 +811,53 @@ def __getitem__(self, params): @_tp_cache def __getitem_inner__(self, params): - if self.__origin__ is tuple and self._special: - if params == (): - return self.copy_with((_TypingEmpty,)) - if not isinstance(params, tuple): - params = (params,) - if len(params) == 2 and params[1] is ...: - msg = "Tuple[t, ...]: t must be a type." - p = _type_check(params[0], msg) - return self.copy_with((p, _TypingEllipsis)) - msg = "Tuple[t0, t1, ...]: each t must be a type." - params = tuple(_type_check(p, msg) for p in params) - return self.copy_with(params) - if self.__origin__ is collections.abc.Callable and self._special: - args, result = params - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - if args is Ellipsis: - return self.copy_with((_TypingEllipsis, result)) - msg = "Callable[[arg, ...], result]: each arg must be a type." - args = tuple(_type_check(arg, msg) for arg in args) - params = args + (result,) - return self.copy_with(params) - return super().__getitem__(params) + args, result = params + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + if args is Ellipsis: + return self.copy_with((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." + args = tuple(_type_check(arg, msg) for arg in args) + params = args + (result,) + return self.copy_with(params) + + +class _TupleType(_SpecialGenericAlias, _root=True): + @_tp_cache + def __getitem__(self, params): + if params == (): + return self.copy_with((_TypingEmpty,)) + if not isinstance(params, tuple): + params = (params,) + if len(params) == 2 and params[1] is ...: + msg = "Tuple[t, ...]: t must be a type." 
+ p = _type_check(params[0], msg) + return self.copy_with((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." + params = tuple(_type_check(p, msg) for p in params) + return self.copy_with(params) + + +class _UnionGenericAlias(_GenericAlias, _root=True): + def copy_with(self, params): + return Union[params] + + def __eq__(self, other): + if not isinstance(other, _UnionGenericAlias): + return NotImplemented + return set(self.__args__) == set(other.__args__) + + def __hash__(self): + return hash(frozenset(self.__args__)) + + def __repr__(self): + args = self.__args__ + if len(args) == 2: + if args[0] is type(None): + return f'typing.Optional[{_type_repr(args[1])}]' + elif args[1] is type(None): + return f'typing.Optional[{_type_repr(args[0])}]' + return super().__repr__() class Generic: @@ -1162,9 +1173,8 @@ def __reduce__(self): def __eq__(self, other): if not isinstance(other, _AnnotatedAlias): return NotImplemented - if self.__origin__ != other.__origin__: - return False - return self.__metadata__ == other.__metadata__ + return (self.__origin__ == other.__origin__ + and self.__metadata__ == other.__metadata__) def __hash__(self): return hash((self.__origin__, self.__metadata__)) @@ -1380,9 +1390,7 @@ def _strip_annotations(t): stripped_args = tuple(_strip_annotations(a) for a in t.__args__) if stripped_args == t.__args__: return t - res = t.copy_with(stripped_args) - res._special = t._special - return res + return t.copy_with(stripped_args) if isinstance(t, GenericAlias): stripped_args = tuple(_strip_annotations(a) for a in t.__args__) if stripped_args == t.__args__: @@ -1407,7 +1415,7 @@ def get_origin(tp): """ if isinstance(tp, _AnnotatedAlias): return Annotated - if isinstance(tp, (_GenericAlias, GenericAlias)): + if isinstance(tp, (_BaseGenericAlias, GenericAlias)): return tp.__origin__ if tp is Generic: return Generic @@ -1427,7 +1435,7 @@ def get_args(tp): """ if isinstance(tp, _AnnotatedAlias): return (tp.__origin__,) + tp.__metadata__ 
- if isinstance(tp, _GenericAlias) and not tp._special: + if isinstance(tp, _GenericAlias): res = tp.__args__ if tp.__origin__ is collections.abc.Callable and res[0] is not Ellipsis: res = (list(res[:-1]), res[-1]) @@ -1561,8 +1569,7 @@ class Other(Leaf): # Error reported by type checker # Various ABCs mimicking those in collections.abc. -def _alias(origin, params, inst=True): - return _GenericAlias(origin, params, special=True, inst=inst) +_alias = _SpecialGenericAlias Hashable = _alias(collections.abc.Hashable, ()) # Not generic. Awaitable = _alias(collections.abc.Awaitable, T_co) @@ -1575,7 +1582,7 @@ def _alias(origin, params, inst=True): Sized = _alias(collections.abc.Sized, ()) # Not generic. Container = _alias(collections.abc.Container, T_co) Collection = _alias(collections.abc.Collection, T_co) -Callable = _VariadicGenericAlias(collections.abc.Callable, (), special=True) +Callable = _CallableType(collections.abc.Callable, ()) Callable.__doc__ = \ """Callable type; Callable[[int], str] is a function of (int) -> str. @@ -1586,7 +1593,7 @@ def _alias(origin, params, inst=True): There is no syntax to indicate optional or keyword arguments, such function types are rarely used as callback types. """ -AbstractSet = _alias(collections.abc.Set, T_co) +AbstractSet = _alias(collections.abc.Set, T_co, name='AbstractSet') MutableSet = _alias(collections.abc.MutableSet, T) # NOTE: Mapping is only covariant in the value type. Mapping = _alias(collections.abc.Mapping, (KT, VT_co)) @@ -1594,7 +1601,7 @@ def _alias(origin, params, inst=True): Sequence = _alias(collections.abc.Sequence, T_co) MutableSequence = _alias(collections.abc.MutableSequence, T) ByteString = _alias(collections.abc.ByteString, ()) # Not generic -Tuple = _VariadicGenericAlias(tuple, (), inst=False, special=True) +Tuple = _TupleType(tuple, (), inst=False, name='Tuple') Tuple.__doc__ = \ """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. 
@@ -1604,24 +1611,24 @@ def _alias(origin, params, inst=True): To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. """ -List = _alias(list, T, inst=False) -Deque = _alias(collections.deque, T) -Set = _alias(set, T, inst=False) -FrozenSet = _alias(frozenset, T_co, inst=False) +List = _alias(list, T, inst=False, name='List') +Deque = _alias(collections.deque, T, name='Deque') +Set = _alias(set, T, inst=False, name='Set') +FrozenSet = _alias(frozenset, T_co, inst=False, name='FrozenSet') MappingView = _alias(collections.abc.MappingView, T_co) KeysView = _alias(collections.abc.KeysView, KT) ItemsView = _alias(collections.abc.ItemsView, (KT, VT_co)) ValuesView = _alias(collections.abc.ValuesView, VT_co) -ContextManager = _alias(contextlib.AbstractContextManager, T_co) -AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, T_co) -Dict = _alias(dict, (KT, VT), inst=False) -DefaultDict = _alias(collections.defaultdict, (KT, VT)) +ContextManager = _alias(contextlib.AbstractContextManager, T_co, name='ContextManager') +AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, T_co, name='AsyncContextManager') +Dict = _alias(dict, (KT, VT), inst=False, name='Dict') +DefaultDict = _alias(collections.defaultdict, (KT, VT), name='DefaultDict') OrderedDict = _alias(collections.OrderedDict, (KT, VT)) Counter = _alias(collections.Counter, T) ChainMap = _alias(collections.ChainMap, (KT, VT)) Generator = _alias(collections.abc.Generator, (T_co, T_contra, V_co)) AsyncGenerator = _alias(collections.abc.AsyncGenerator, (T_co, T_contra)) -Type = _alias(type, CT_co, inst=False) +Type = _alias(type, CT_co, inst=False, name='Type') Type.__doc__ = \ """A special construct usable to annotate class objects. 
From 3466922320d54a922cfe6d6d44e89e1cea4023ef Mon Sep 17 00:00:00 2001 From: Dong-hee Na Date: Thu, 7 May 2020 10:17:16 +0900 Subject: [PATCH 033/115] bpo-1635741: Port errno module to multiphase initialization (GH-19923) --- ...2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst | 1 + Modules/errnomodule.c | 645 +++++++++--------- 2 files changed, 336 insertions(+), 310 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst new file mode 100644 index 00000000000000..197eae97c3d1ab --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst @@ -0,0 +1 @@ +Port :mod:`errno` to multiphase initialization (:pep:`489`). diff --git a/Modules/errnomodule.c b/Modules/errnomodule.c index 06ed53a64dbdc1..d99bed45bd6a23 100644 --- a/Modules/errnomodule.c +++ b/Modules/errnomodule.c @@ -46,66 +46,57 @@ static PyMethodDef errno_methods[] = { /* Helper function doing the dictionary inserting */ -static void -_inscode(PyObject *d, PyObject *de, const char *name, int code) +static int +_add_errcode(PyObject *module_dict, PyObject *error_dict, const char *name_str, int code_int) { - PyObject *u = PyUnicode_FromString(name); - PyObject *v = PyLong_FromLong((long) code); - - /* Don't bother checking for errors; they'll be caught at the end - * of the module initialization function by the caller of - * initerrno(). 
- */ - if (u && v) { - /* insert in modules dict */ - PyDict_SetItem(d, u, v); - /* insert in errorcode dict */ - PyDict_SetItem(de, v, u); + PyObject *name = PyUnicode_FromString(name_str); + if (!name) { + return -1; } - Py_XDECREF(u); - Py_XDECREF(v); -} -PyDoc_STRVAR(errno__doc__, -"This module makes available standard errno system symbols.\n\ -\n\ -The value of each symbol is the corresponding integer value,\n\ -e.g., on most systems, errno.ENOENT equals the integer 2.\n\ -\n\ -The dictionary errno.errorcode maps numeric codes to symbol names,\n\ -e.g., errno.errorcode[2] could be the string 'ENOENT'.\n\ -\n\ -Symbols that are not relevant to the underlying system are not defined.\n\ -\n\ -To map error codes to error messages, use the function os.strerror(),\n\ -e.g. os.strerror(2) could return 'No such file or directory'."); + PyObject *code = PyLong_FromLong(code_int); + if (!code) { + Py_DECREF(name); + return -1; + } -static struct PyModuleDef errnomodule = { - PyModuleDef_HEAD_INIT, - "errno", - errno__doc__, - -1, - errno_methods, - NULL, - NULL, - NULL, - NULL -}; + int ret = -1; + /* insert in modules dict */ + if (PyDict_SetItem(module_dict, name, code) < 0) { + goto end; + } + /* insert in errorcode dict */ + if (PyDict_SetItem(error_dict, code, name) < 0) { + goto end; + } + ret = 0; +end: + Py_DECREF(name); + Py_DECREF(code); + return ret; +} -PyMODINIT_FUNC -PyInit_errno(void) +static int +errno_exec(PyObject *module) { - PyObject *m, *d, *de; - m = PyModule_Create(&errnomodule); - if (m == NULL) - return NULL; - d = PyModule_GetDict(m); - de = PyDict_New(); - if (!d || !de || PyDict_SetItemString(d, "errorcode", de) < 0) - return NULL; + PyObject *module_dict = PyModule_GetDict(module); + PyObject *error_dict = PyDict_New(); + if (!module_dict || !error_dict) { + return -1; + } + if (PyDict_SetItemString(module_dict, "errorcode", error_dict) < 0) { + Py_DECREF(error_dict); + return -1; + } /* Macro so I don't have to edit each and every line 
below... */ -#define inscode(d, ds, de, name, code, comment) _inscode(d, de, name, code) +#define add_errcode(name, code, comment) \ + do { \ + if (_add_errcode(module_dict, error_dict, name, code) < 0) { \ + Py_DECREF(error_dict); \ + return -1; \ + } \ + } while (0); /* * The names and comments are borrowed from linux/include/errno.h, @@ -116,820 +107,854 @@ PyInit_errno(void) */ #ifdef ENODEV - inscode(d, ds, de, "ENODEV", ENODEV, "No such device"); + add_errcode("ENODEV", ENODEV, "No such device"); #endif #ifdef ENOCSI - inscode(d, ds, de, "ENOCSI", ENOCSI, "No CSI structure available"); + add_errcode("ENOCSI", ENOCSI, "No CSI structure available"); #endif #ifdef EHOSTUNREACH - inscode(d, ds, de, "EHOSTUNREACH", EHOSTUNREACH, "No route to host"); + add_errcode("EHOSTUNREACH", EHOSTUNREACH, "No route to host"); #else #ifdef WSAEHOSTUNREACH - inscode(d, ds, de, "EHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); + add_errcode("EHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); #endif #endif #ifdef ENOMSG - inscode(d, ds, de, "ENOMSG", ENOMSG, "No message of desired type"); + add_errcode("ENOMSG", ENOMSG, "No message of desired type"); #endif #ifdef EUCLEAN - inscode(d, ds, de, "EUCLEAN", EUCLEAN, "Structure needs cleaning"); + add_errcode("EUCLEAN", EUCLEAN, "Structure needs cleaning"); #endif #ifdef EL2NSYNC - inscode(d, ds, de, "EL2NSYNC", EL2NSYNC, "Level 2 not synchronized"); + add_errcode("EL2NSYNC", EL2NSYNC, "Level 2 not synchronized"); #endif #ifdef EL2HLT - inscode(d, ds, de, "EL2HLT", EL2HLT, "Level 2 halted"); + add_errcode("EL2HLT", EL2HLT, "Level 2 halted"); #endif #ifdef ENODATA - inscode(d, ds, de, "ENODATA", ENODATA, "No data available"); + add_errcode("ENODATA", ENODATA, "No data available"); #endif #ifdef ENOTBLK - inscode(d, ds, de, "ENOTBLK", ENOTBLK, "Block device required"); + add_errcode("ENOTBLK", ENOTBLK, "Block device required"); #endif #ifdef ENOSYS - inscode(d, ds, de, "ENOSYS", ENOSYS, "Function not implemented"); + 
add_errcode("ENOSYS", ENOSYS, "Function not implemented"); #endif #ifdef EPIPE - inscode(d, ds, de, "EPIPE", EPIPE, "Broken pipe"); + add_errcode("EPIPE", EPIPE, "Broken pipe"); #endif #ifdef EINVAL - inscode(d, ds, de, "EINVAL", EINVAL, "Invalid argument"); + add_errcode("EINVAL", EINVAL, "Invalid argument"); #else #ifdef WSAEINVAL - inscode(d, ds, de, "EINVAL", WSAEINVAL, "Invalid argument"); + add_errcode("EINVAL", WSAEINVAL, "Invalid argument"); #endif #endif #ifdef EOVERFLOW - inscode(d, ds, de, "EOVERFLOW", EOVERFLOW, "Value too large for defined data type"); + add_errcode("EOVERFLOW", EOVERFLOW, "Value too large for defined data type"); #endif #ifdef EADV - inscode(d, ds, de, "EADV", EADV, "Advertise error"); + add_errcode("EADV", EADV, "Advertise error"); #endif #ifdef EINTR - inscode(d, ds, de, "EINTR", EINTR, "Interrupted system call"); + add_errcode("EINTR", EINTR, "Interrupted system call"); #else #ifdef WSAEINTR - inscode(d, ds, de, "EINTR", WSAEINTR, "Interrupted system call"); + add_errcode("EINTR", WSAEINTR, "Interrupted system call"); #endif #endif #ifdef EUSERS - inscode(d, ds, de, "EUSERS", EUSERS, "Too many users"); + add_errcode("EUSERS", EUSERS, "Too many users"); #else #ifdef WSAEUSERS - inscode(d, ds, de, "EUSERS", WSAEUSERS, "Too many users"); + add_errcode("EUSERS", WSAEUSERS, "Too many users"); #endif #endif #ifdef ENOTEMPTY - inscode(d, ds, de, "ENOTEMPTY", ENOTEMPTY, "Directory not empty"); + add_errcode("ENOTEMPTY", ENOTEMPTY, "Directory not empty"); #else #ifdef WSAENOTEMPTY - inscode(d, ds, de, "ENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); + add_errcode("ENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); #endif #endif #ifdef ENOBUFS - inscode(d, ds, de, "ENOBUFS", ENOBUFS, "No buffer space available"); + add_errcode("ENOBUFS", ENOBUFS, "No buffer space available"); #else #ifdef WSAENOBUFS - inscode(d, ds, de, "ENOBUFS", WSAENOBUFS, "No buffer space available"); + add_errcode("ENOBUFS", WSAENOBUFS, "No buffer space available"); 
#endif #endif #ifdef EPROTO - inscode(d, ds, de, "EPROTO", EPROTO, "Protocol error"); + add_errcode("EPROTO", EPROTO, "Protocol error"); #endif #ifdef EREMOTE - inscode(d, ds, de, "EREMOTE", EREMOTE, "Object is remote"); + add_errcode("EREMOTE", EREMOTE, "Object is remote"); #else #ifdef WSAEREMOTE - inscode(d, ds, de, "EREMOTE", WSAEREMOTE, "Object is remote"); + add_errcode("EREMOTE", WSAEREMOTE, "Object is remote"); #endif #endif #ifdef ENAVAIL - inscode(d, ds, de, "ENAVAIL", ENAVAIL, "No XENIX semaphores available"); + add_errcode("ENAVAIL", ENAVAIL, "No XENIX semaphores available"); #endif #ifdef ECHILD - inscode(d, ds, de, "ECHILD", ECHILD, "No child processes"); + add_errcode("ECHILD", ECHILD, "No child processes"); #endif #ifdef ELOOP - inscode(d, ds, de, "ELOOP", ELOOP, "Too many symbolic links encountered"); + add_errcode("ELOOP", ELOOP, "Too many symbolic links encountered"); #else #ifdef WSAELOOP - inscode(d, ds, de, "ELOOP", WSAELOOP, "Too many symbolic links encountered"); + add_errcode("ELOOP", WSAELOOP, "Too many symbolic links encountered"); #endif #endif #ifdef EXDEV - inscode(d, ds, de, "EXDEV", EXDEV, "Cross-device link"); + add_errcode("EXDEV", EXDEV, "Cross-device link"); #endif #ifdef E2BIG - inscode(d, ds, de, "E2BIG", E2BIG, "Arg list too long"); + add_errcode("E2BIG", E2BIG, "Arg list too long"); #endif #ifdef ESRCH - inscode(d, ds, de, "ESRCH", ESRCH, "No such process"); + add_errcode("ESRCH", ESRCH, "No such process"); #endif #ifdef EMSGSIZE - inscode(d, ds, de, "EMSGSIZE", EMSGSIZE, "Message too long"); + add_errcode("EMSGSIZE", EMSGSIZE, "Message too long"); #else #ifdef WSAEMSGSIZE - inscode(d, ds, de, "EMSGSIZE", WSAEMSGSIZE, "Message too long"); + add_errcode("EMSGSIZE", WSAEMSGSIZE, "Message too long"); #endif #endif #ifdef EAFNOSUPPORT - inscode(d, ds, de, "EAFNOSUPPORT", EAFNOSUPPORT, "Address family not supported by protocol"); + add_errcode("EAFNOSUPPORT", EAFNOSUPPORT, "Address family not supported by protocol"); #else #ifdef 
WSAEAFNOSUPPORT - inscode(d, ds, de, "EAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); + add_errcode("EAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); #endif #endif #ifdef EBADR - inscode(d, ds, de, "EBADR", EBADR, "Invalid request descriptor"); + add_errcode("EBADR", EBADR, "Invalid request descriptor"); #endif #ifdef EHOSTDOWN - inscode(d, ds, de, "EHOSTDOWN", EHOSTDOWN, "Host is down"); + add_errcode("EHOSTDOWN", EHOSTDOWN, "Host is down"); #else #ifdef WSAEHOSTDOWN - inscode(d, ds, de, "EHOSTDOWN", WSAEHOSTDOWN, "Host is down"); + add_errcode("EHOSTDOWN", WSAEHOSTDOWN, "Host is down"); #endif #endif #ifdef EPFNOSUPPORT - inscode(d, ds, de, "EPFNOSUPPORT", EPFNOSUPPORT, "Protocol family not supported"); + add_errcode("EPFNOSUPPORT", EPFNOSUPPORT, "Protocol family not supported"); #else #ifdef WSAEPFNOSUPPORT - inscode(d, ds, de, "EPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); + add_errcode("EPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); #endif #endif #ifdef ENOPROTOOPT - inscode(d, ds, de, "ENOPROTOOPT", ENOPROTOOPT, "Protocol not available"); + add_errcode("ENOPROTOOPT", ENOPROTOOPT, "Protocol not available"); #else #ifdef WSAENOPROTOOPT - inscode(d, ds, de, "ENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); + add_errcode("ENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); #endif #endif #ifdef EBUSY - inscode(d, ds, de, "EBUSY", EBUSY, "Device or resource busy"); + add_errcode("EBUSY", EBUSY, "Device or resource busy"); #endif #ifdef EWOULDBLOCK - inscode(d, ds, de, "EWOULDBLOCK", EWOULDBLOCK, "Operation would block"); + add_errcode("EWOULDBLOCK", EWOULDBLOCK, "Operation would block"); #else #ifdef WSAEWOULDBLOCK - inscode(d, ds, de, "EWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); + add_errcode("EWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); #endif #endif #ifdef EBADFD - inscode(d, ds, de, "EBADFD", EBADFD, "File descriptor in bad 
state"); + add_errcode("EBADFD", EBADFD, "File descriptor in bad state"); #endif #ifdef EDOTDOT - inscode(d, ds, de, "EDOTDOT", EDOTDOT, "RFS specific error"); + add_errcode("EDOTDOT", EDOTDOT, "RFS specific error"); #endif #ifdef EISCONN - inscode(d, ds, de, "EISCONN", EISCONN, "Transport endpoint is already connected"); + add_errcode("EISCONN", EISCONN, "Transport endpoint is already connected"); #else #ifdef WSAEISCONN - inscode(d, ds, de, "EISCONN", WSAEISCONN, "Transport endpoint is already connected"); + add_errcode("EISCONN", WSAEISCONN, "Transport endpoint is already connected"); #endif #endif #ifdef ENOANO - inscode(d, ds, de, "ENOANO", ENOANO, "No anode"); + add_errcode("ENOANO", ENOANO, "No anode"); #endif #ifdef ESHUTDOWN - inscode(d, ds, de, "ESHUTDOWN", ESHUTDOWN, "Cannot send after transport endpoint shutdown"); + add_errcode("ESHUTDOWN", ESHUTDOWN, "Cannot send after transport endpoint shutdown"); #else #ifdef WSAESHUTDOWN - inscode(d, ds, de, "ESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); + add_errcode("ESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); #endif #endif #ifdef ECHRNG - inscode(d, ds, de, "ECHRNG", ECHRNG, "Channel number out of range"); + add_errcode("ECHRNG", ECHRNG, "Channel number out of range"); #endif #ifdef ELIBBAD - inscode(d, ds, de, "ELIBBAD", ELIBBAD, "Accessing a corrupted shared library"); + add_errcode("ELIBBAD", ELIBBAD, "Accessing a corrupted shared library"); #endif #ifdef ENONET - inscode(d, ds, de, "ENONET", ENONET, "Machine is not on the network"); + add_errcode("ENONET", ENONET, "Machine is not on the network"); #endif #ifdef EBADE - inscode(d, ds, de, "EBADE", EBADE, "Invalid exchange"); + add_errcode("EBADE", EBADE, "Invalid exchange"); #endif #ifdef EBADF - inscode(d, ds, de, "EBADF", EBADF, "Bad file number"); + add_errcode("EBADF", EBADF, "Bad file number"); #else #ifdef WSAEBADF - inscode(d, ds, de, "EBADF", WSAEBADF, "Bad file number"); + 
add_errcode("EBADF", WSAEBADF, "Bad file number"); #endif #endif #ifdef EMULTIHOP - inscode(d, ds, de, "EMULTIHOP", EMULTIHOP, "Multihop attempted"); + add_errcode("EMULTIHOP", EMULTIHOP, "Multihop attempted"); #endif #ifdef EIO - inscode(d, ds, de, "EIO", EIO, "I/O error"); + add_errcode("EIO", EIO, "I/O error"); #endif #ifdef EUNATCH - inscode(d, ds, de, "EUNATCH", EUNATCH, "Protocol driver not attached"); + add_errcode("EUNATCH", EUNATCH, "Protocol driver not attached"); #endif #ifdef EPROTOTYPE - inscode(d, ds, de, "EPROTOTYPE", EPROTOTYPE, "Protocol wrong type for socket"); + add_errcode("EPROTOTYPE", EPROTOTYPE, "Protocol wrong type for socket"); #else #ifdef WSAEPROTOTYPE - inscode(d, ds, de, "EPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); + add_errcode("EPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); #endif #endif #ifdef ENOSPC - inscode(d, ds, de, "ENOSPC", ENOSPC, "No space left on device"); + add_errcode("ENOSPC", ENOSPC, "No space left on device"); #endif #ifdef ENOEXEC - inscode(d, ds, de, "ENOEXEC", ENOEXEC, "Exec format error"); + add_errcode("ENOEXEC", ENOEXEC, "Exec format error"); #endif #ifdef EALREADY - inscode(d, ds, de, "EALREADY", EALREADY, "Operation already in progress"); + add_errcode("EALREADY", EALREADY, "Operation already in progress"); #else #ifdef WSAEALREADY - inscode(d, ds, de, "EALREADY", WSAEALREADY, "Operation already in progress"); + add_errcode("EALREADY", WSAEALREADY, "Operation already in progress"); #endif #endif #ifdef ENETDOWN - inscode(d, ds, de, "ENETDOWN", ENETDOWN, "Network is down"); + add_errcode("ENETDOWN", ENETDOWN, "Network is down"); #else #ifdef WSAENETDOWN - inscode(d, ds, de, "ENETDOWN", WSAENETDOWN, "Network is down"); + add_errcode("ENETDOWN", WSAENETDOWN, "Network is down"); #endif #endif #ifdef ENOTNAM - inscode(d, ds, de, "ENOTNAM", ENOTNAM, "Not a XENIX named type file"); + add_errcode("ENOTNAM", ENOTNAM, "Not a XENIX named type file"); #endif #ifdef EACCES - inscode(d, ds, 
de, "EACCES", EACCES, "Permission denied"); + add_errcode("EACCES", EACCES, "Permission denied"); #else #ifdef WSAEACCES - inscode(d, ds, de, "EACCES", WSAEACCES, "Permission denied"); + add_errcode("EACCES", WSAEACCES, "Permission denied"); #endif #endif #ifdef ELNRNG - inscode(d, ds, de, "ELNRNG", ELNRNG, "Link number out of range"); + add_errcode("ELNRNG", ELNRNG, "Link number out of range"); #endif #ifdef EILSEQ - inscode(d, ds, de, "EILSEQ", EILSEQ, "Illegal byte sequence"); + add_errcode("EILSEQ", EILSEQ, "Illegal byte sequence"); #endif #ifdef ENOTDIR - inscode(d, ds, de, "ENOTDIR", ENOTDIR, "Not a directory"); + add_errcode("ENOTDIR", ENOTDIR, "Not a directory"); #endif #ifdef ENOTUNIQ - inscode(d, ds, de, "ENOTUNIQ", ENOTUNIQ, "Name not unique on network"); + add_errcode("ENOTUNIQ", ENOTUNIQ, "Name not unique on network"); #endif #ifdef EPERM - inscode(d, ds, de, "EPERM", EPERM, "Operation not permitted"); + add_errcode("EPERM", EPERM, "Operation not permitted"); #endif #ifdef EDOM - inscode(d, ds, de, "EDOM", EDOM, "Math argument out of domain of func"); + add_errcode("EDOM", EDOM, "Math argument out of domain of func"); #endif #ifdef EXFULL - inscode(d, ds, de, "EXFULL", EXFULL, "Exchange full"); + add_errcode("EXFULL", EXFULL, "Exchange full"); #endif #ifdef ECONNREFUSED - inscode(d, ds, de, "ECONNREFUSED", ECONNREFUSED, "Connection refused"); + add_errcode("ECONNREFUSED", ECONNREFUSED, "Connection refused"); #else #ifdef WSAECONNREFUSED - inscode(d, ds, de, "ECONNREFUSED", WSAECONNREFUSED, "Connection refused"); + add_errcode("ECONNREFUSED", WSAECONNREFUSED, "Connection refused"); #endif #endif #ifdef EISDIR - inscode(d, ds, de, "EISDIR", EISDIR, "Is a directory"); + add_errcode("EISDIR", EISDIR, "Is a directory"); #endif #ifdef EPROTONOSUPPORT - inscode(d, ds, de, "EPROTONOSUPPORT", EPROTONOSUPPORT, "Protocol not supported"); + add_errcode("EPROTONOSUPPORT", EPROTONOSUPPORT, "Protocol not supported"); #else #ifdef WSAEPROTONOSUPPORT - inscode(d, ds, 
de, "EPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); + add_errcode("EPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); #endif #endif #ifdef EROFS - inscode(d, ds, de, "EROFS", EROFS, "Read-only file system"); + add_errcode("EROFS", EROFS, "Read-only file system"); #endif #ifdef EADDRNOTAVAIL - inscode(d, ds, de, "EADDRNOTAVAIL", EADDRNOTAVAIL, "Cannot assign requested address"); + add_errcode("EADDRNOTAVAIL", EADDRNOTAVAIL, "Cannot assign requested address"); #else #ifdef WSAEADDRNOTAVAIL - inscode(d, ds, de, "EADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); + add_errcode("EADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); #endif #endif #ifdef EIDRM - inscode(d, ds, de, "EIDRM", EIDRM, "Identifier removed"); + add_errcode("EIDRM", EIDRM, "Identifier removed"); #endif #ifdef ECOMM - inscode(d, ds, de, "ECOMM", ECOMM, "Communication error on send"); + add_errcode("ECOMM", ECOMM, "Communication error on send"); #endif #ifdef ESRMNT - inscode(d, ds, de, "ESRMNT", ESRMNT, "Srmount error"); + add_errcode("ESRMNT", ESRMNT, "Srmount error"); #endif #ifdef EREMOTEIO - inscode(d, ds, de, "EREMOTEIO", EREMOTEIO, "Remote I/O error"); + add_errcode("EREMOTEIO", EREMOTEIO, "Remote I/O error"); #endif #ifdef EL3RST - inscode(d, ds, de, "EL3RST", EL3RST, "Level 3 reset"); + add_errcode("EL3RST", EL3RST, "Level 3 reset"); #endif #ifdef EBADMSG - inscode(d, ds, de, "EBADMSG", EBADMSG, "Not a data message"); + add_errcode("EBADMSG", EBADMSG, "Not a data message"); #endif #ifdef ENFILE - inscode(d, ds, de, "ENFILE", ENFILE, "File table overflow"); + add_errcode("ENFILE", ENFILE, "File table overflow"); #endif #ifdef ELIBMAX - inscode(d, ds, de, "ELIBMAX", ELIBMAX, "Attempting to link in too many shared libraries"); + add_errcode("ELIBMAX", ELIBMAX, "Attempting to link in too many shared libraries"); #endif #ifdef ESPIPE - inscode(d, ds, de, "ESPIPE", ESPIPE, "Illegal seek"); + add_errcode("ESPIPE", ESPIPE, "Illegal 
seek"); #endif #ifdef ENOLINK - inscode(d, ds, de, "ENOLINK", ENOLINK, "Link has been severed"); + add_errcode("ENOLINK", ENOLINK, "Link has been severed"); #endif #ifdef ENETRESET - inscode(d, ds, de, "ENETRESET", ENETRESET, "Network dropped connection because of reset"); + add_errcode("ENETRESET", ENETRESET, "Network dropped connection because of reset"); #else #ifdef WSAENETRESET - inscode(d, ds, de, "ENETRESET", WSAENETRESET, "Network dropped connection because of reset"); + add_errcode("ENETRESET", WSAENETRESET, "Network dropped connection because of reset"); #endif #endif #ifdef ETIMEDOUT - inscode(d, ds, de, "ETIMEDOUT", ETIMEDOUT, "Connection timed out"); + add_errcode("ETIMEDOUT", ETIMEDOUT, "Connection timed out"); #else #ifdef WSAETIMEDOUT - inscode(d, ds, de, "ETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); + add_errcode("ETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); #endif #endif #ifdef ENOENT - inscode(d, ds, de, "ENOENT", ENOENT, "No such file or directory"); + add_errcode("ENOENT", ENOENT, "No such file or directory"); #endif #ifdef EEXIST - inscode(d, ds, de, "EEXIST", EEXIST, "File exists"); + add_errcode("EEXIST", EEXIST, "File exists"); #endif #ifdef EDQUOT - inscode(d, ds, de, "EDQUOT", EDQUOT, "Quota exceeded"); + add_errcode("EDQUOT", EDQUOT, "Quota exceeded"); #else #ifdef WSAEDQUOT - inscode(d, ds, de, "EDQUOT", WSAEDQUOT, "Quota exceeded"); + add_errcode("EDQUOT", WSAEDQUOT, "Quota exceeded"); #endif #endif #ifdef ENOSTR - inscode(d, ds, de, "ENOSTR", ENOSTR, "Device not a stream"); + add_errcode("ENOSTR", ENOSTR, "Device not a stream"); #endif #ifdef EBADSLT - inscode(d, ds, de, "EBADSLT", EBADSLT, "Invalid slot"); + add_errcode("EBADSLT", EBADSLT, "Invalid slot"); #endif #ifdef EBADRQC - inscode(d, ds, de, "EBADRQC", EBADRQC, "Invalid request code"); + add_errcode("EBADRQC", EBADRQC, "Invalid request code"); #endif #ifdef ELIBACC - inscode(d, ds, de, "ELIBACC", ELIBACC, "Can not access a needed shared library"); + 
add_errcode("ELIBACC", ELIBACC, "Can not access a needed shared library"); #endif #ifdef EFAULT - inscode(d, ds, de, "EFAULT", EFAULT, "Bad address"); + add_errcode("EFAULT", EFAULT, "Bad address"); #else #ifdef WSAEFAULT - inscode(d, ds, de, "EFAULT", WSAEFAULT, "Bad address"); + add_errcode("EFAULT", WSAEFAULT, "Bad address"); #endif #endif #ifdef EFBIG - inscode(d, ds, de, "EFBIG", EFBIG, "File too large"); + add_errcode("EFBIG", EFBIG, "File too large"); #endif #ifdef EDEADLK - inscode(d, ds, de, "EDEADLK", EDEADLK, "Resource deadlock would occur"); + add_errcode("EDEADLK", EDEADLK, "Resource deadlock would occur"); #endif #ifdef ENOTCONN - inscode(d, ds, de, "ENOTCONN", ENOTCONN, "Transport endpoint is not connected"); + add_errcode("ENOTCONN", ENOTCONN, "Transport endpoint is not connected"); #else #ifdef WSAENOTCONN - inscode(d, ds, de, "ENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); + add_errcode("ENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); #endif #endif #ifdef EDESTADDRREQ - inscode(d, ds, de, "EDESTADDRREQ", EDESTADDRREQ, "Destination address required"); + add_errcode("EDESTADDRREQ", EDESTADDRREQ, "Destination address required"); #else #ifdef WSAEDESTADDRREQ - inscode(d, ds, de, "EDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); + add_errcode("EDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); #endif #endif #ifdef ELIBSCN - inscode(d, ds, de, "ELIBSCN", ELIBSCN, ".lib section in a.out corrupted"); + add_errcode("ELIBSCN", ELIBSCN, ".lib section in a.out corrupted"); #endif #ifdef ENOLCK - inscode(d, ds, de, "ENOLCK", ENOLCK, "No record locks available"); + add_errcode("ENOLCK", ENOLCK, "No record locks available"); #endif #ifdef EISNAM - inscode(d, ds, de, "EISNAM", EISNAM, "Is a named type file"); + add_errcode("EISNAM", EISNAM, "Is a named type file"); #endif #ifdef ECONNABORTED - inscode(d, ds, de, "ECONNABORTED", ECONNABORTED, "Software caused connection abort"); + 
add_errcode("ECONNABORTED", ECONNABORTED, "Software caused connection abort"); #else #ifdef WSAECONNABORTED - inscode(d, ds, de, "ECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); + add_errcode("ECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); #endif #endif #ifdef ENETUNREACH - inscode(d, ds, de, "ENETUNREACH", ENETUNREACH, "Network is unreachable"); + add_errcode("ENETUNREACH", ENETUNREACH, "Network is unreachable"); #else #ifdef WSAENETUNREACH - inscode(d, ds, de, "ENETUNREACH", WSAENETUNREACH, "Network is unreachable"); + add_errcode("ENETUNREACH", WSAENETUNREACH, "Network is unreachable"); #endif #endif #ifdef ESTALE - inscode(d, ds, de, "ESTALE", ESTALE, "Stale NFS file handle"); + add_errcode("ESTALE", ESTALE, "Stale NFS file handle"); #else #ifdef WSAESTALE - inscode(d, ds, de, "ESTALE", WSAESTALE, "Stale NFS file handle"); + add_errcode("ESTALE", WSAESTALE, "Stale NFS file handle"); #endif #endif #ifdef ENOSR - inscode(d, ds, de, "ENOSR", ENOSR, "Out of streams resources"); + add_errcode("ENOSR", ENOSR, "Out of streams resources"); #endif #ifdef ENOMEM - inscode(d, ds, de, "ENOMEM", ENOMEM, "Out of memory"); + add_errcode("ENOMEM", ENOMEM, "Out of memory"); #endif #ifdef ENOTSOCK - inscode(d, ds, de, "ENOTSOCK", ENOTSOCK, "Socket operation on non-socket"); + add_errcode("ENOTSOCK", ENOTSOCK, "Socket operation on non-socket"); #else #ifdef WSAENOTSOCK - inscode(d, ds, de, "ENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); + add_errcode("ENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); #endif #endif #ifdef ESTRPIPE - inscode(d, ds, de, "ESTRPIPE", ESTRPIPE, "Streams pipe error"); + add_errcode("ESTRPIPE", ESTRPIPE, "Streams pipe error"); #endif #ifdef EMLINK - inscode(d, ds, de, "EMLINK", EMLINK, "Too many links"); + add_errcode("EMLINK", EMLINK, "Too many links"); #endif #ifdef ERANGE - inscode(d, ds, de, "ERANGE", ERANGE, "Math result not representable"); + add_errcode("ERANGE", ERANGE, "Math result 
not representable"); #endif #ifdef ELIBEXEC - inscode(d, ds, de, "ELIBEXEC", ELIBEXEC, "Cannot exec a shared library directly"); + add_errcode("ELIBEXEC", ELIBEXEC, "Cannot exec a shared library directly"); #endif #ifdef EL3HLT - inscode(d, ds, de, "EL3HLT", EL3HLT, "Level 3 halted"); + add_errcode("EL3HLT", EL3HLT, "Level 3 halted"); #endif #ifdef ECONNRESET - inscode(d, ds, de, "ECONNRESET", ECONNRESET, "Connection reset by peer"); + add_errcode("ECONNRESET", ECONNRESET, "Connection reset by peer"); #else #ifdef WSAECONNRESET - inscode(d, ds, de, "ECONNRESET", WSAECONNRESET, "Connection reset by peer"); + add_errcode("ECONNRESET", WSAECONNRESET, "Connection reset by peer"); #endif #endif #ifdef EADDRINUSE - inscode(d, ds, de, "EADDRINUSE", EADDRINUSE, "Address already in use"); + add_errcode("EADDRINUSE", EADDRINUSE, "Address already in use"); #else #ifdef WSAEADDRINUSE - inscode(d, ds, de, "EADDRINUSE", WSAEADDRINUSE, "Address already in use"); + add_errcode("EADDRINUSE", WSAEADDRINUSE, "Address already in use"); #endif #endif #ifdef EOPNOTSUPP - inscode(d, ds, de, "EOPNOTSUPP", EOPNOTSUPP, "Operation not supported on transport endpoint"); + add_errcode("EOPNOTSUPP", EOPNOTSUPP, "Operation not supported on transport endpoint"); #else #ifdef WSAEOPNOTSUPP - inscode(d, ds, de, "EOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); + add_errcode("EOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); #endif #endif #ifdef EREMCHG - inscode(d, ds, de, "EREMCHG", EREMCHG, "Remote address changed"); + add_errcode("EREMCHG", EREMCHG, "Remote address changed"); #endif #ifdef EAGAIN - inscode(d, ds, de, "EAGAIN", EAGAIN, "Try again"); + add_errcode("EAGAIN", EAGAIN, "Try again"); #endif #ifdef ENAMETOOLONG - inscode(d, ds, de, "ENAMETOOLONG", ENAMETOOLONG, "File name too long"); + add_errcode("ENAMETOOLONG", ENAMETOOLONG, "File name too long"); #else #ifdef WSAENAMETOOLONG - inscode(d, ds, de, "ENAMETOOLONG", WSAENAMETOOLONG, 
"File name too long"); + add_errcode("ENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); #endif #endif #ifdef ENOTTY - inscode(d, ds, de, "ENOTTY", ENOTTY, "Not a typewriter"); + add_errcode("ENOTTY", ENOTTY, "Not a typewriter"); #endif #ifdef ERESTART - inscode(d, ds, de, "ERESTART", ERESTART, "Interrupted system call should be restarted"); + add_errcode("ERESTART", ERESTART, "Interrupted system call should be restarted"); #endif #ifdef ESOCKTNOSUPPORT - inscode(d, ds, de, "ESOCKTNOSUPPORT", ESOCKTNOSUPPORT, "Socket type not supported"); + add_errcode("ESOCKTNOSUPPORT", ESOCKTNOSUPPORT, "Socket type not supported"); #else #ifdef WSAESOCKTNOSUPPORT - inscode(d, ds, de, "ESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); + add_errcode("ESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); #endif #endif #ifdef ETIME - inscode(d, ds, de, "ETIME", ETIME, "Timer expired"); + add_errcode("ETIME", ETIME, "Timer expired"); #endif #ifdef EBFONT - inscode(d, ds, de, "EBFONT", EBFONT, "Bad font file format"); + add_errcode("EBFONT", EBFONT, "Bad font file format"); #endif #ifdef EDEADLOCK - inscode(d, ds, de, "EDEADLOCK", EDEADLOCK, "Error EDEADLOCK"); + add_errcode("EDEADLOCK", EDEADLOCK, "Error EDEADLOCK"); #endif #ifdef ETOOMANYREFS - inscode(d, ds, de, "ETOOMANYREFS", ETOOMANYREFS, "Too many references: cannot splice"); + add_errcode("ETOOMANYREFS", ETOOMANYREFS, "Too many references: cannot splice"); #else #ifdef WSAETOOMANYREFS - inscode(d, ds, de, "ETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); + add_errcode("ETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); #endif #endif #ifdef EMFILE - inscode(d, ds, de, "EMFILE", EMFILE, "Too many open files"); + add_errcode("EMFILE", EMFILE, "Too many open files"); #else #ifdef WSAEMFILE - inscode(d, ds, de, "EMFILE", WSAEMFILE, "Too many open files"); + add_errcode("EMFILE", WSAEMFILE, "Too many open files"); #endif #endif #ifdef ETXTBSY - inscode(d, 
ds, de, "ETXTBSY", ETXTBSY, "Text file busy"); + add_errcode("ETXTBSY", ETXTBSY, "Text file busy"); #endif #ifdef EINPROGRESS - inscode(d, ds, de, "EINPROGRESS", EINPROGRESS, "Operation now in progress"); + add_errcode("EINPROGRESS", EINPROGRESS, "Operation now in progress"); #else #ifdef WSAEINPROGRESS - inscode(d, ds, de, "EINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); + add_errcode("EINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); #endif #endif #ifdef ENXIO - inscode(d, ds, de, "ENXIO", ENXIO, "No such device or address"); + add_errcode("ENXIO", ENXIO, "No such device or address"); #endif #ifdef ENOPKG - inscode(d, ds, de, "ENOPKG", ENOPKG, "Package not installed"); + add_errcode("ENOPKG", ENOPKG, "Package not installed"); #endif #ifdef WSASY - inscode(d, ds, de, "WSASY", WSASY, "Error WSASY"); + add_errcode("WSASY", WSASY, "Error WSASY"); #endif #ifdef WSAEHOSTDOWN - inscode(d, ds, de, "WSAEHOSTDOWN", WSAEHOSTDOWN, "Host is down"); + add_errcode("WSAEHOSTDOWN", WSAEHOSTDOWN, "Host is down"); #endif #ifdef WSAENETDOWN - inscode(d, ds, de, "WSAENETDOWN", WSAENETDOWN, "Network is down"); + add_errcode("WSAENETDOWN", WSAENETDOWN, "Network is down"); #endif #ifdef WSAENOTSOCK - inscode(d, ds, de, "WSAENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); + add_errcode("WSAENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); #endif #ifdef WSAEHOSTUNREACH - inscode(d, ds, de, "WSAEHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); + add_errcode("WSAEHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); #endif #ifdef WSAELOOP - inscode(d, ds, de, "WSAELOOP", WSAELOOP, "Too many symbolic links encountered"); + add_errcode("WSAELOOP", WSAELOOP, "Too many symbolic links encountered"); #endif #ifdef WSAEMFILE - inscode(d, ds, de, "WSAEMFILE", WSAEMFILE, "Too many open files"); + add_errcode("WSAEMFILE", WSAEMFILE, "Too many open files"); #endif #ifdef WSAESTALE - inscode(d, ds, de, "WSAESTALE", WSAESTALE, "Stale NFS file handle"); + 
add_errcode("WSAESTALE", WSAESTALE, "Stale NFS file handle"); #endif #ifdef WSAVERNOTSUPPORTED - inscode(d, ds, de, "WSAVERNOTSUPPORTED", WSAVERNOTSUPPORTED, "Error WSAVERNOTSUPPORTED"); + add_errcode("WSAVERNOTSUPPORTED", WSAVERNOTSUPPORTED, "Error WSAVERNOTSUPPORTED"); #endif #ifdef WSAENETUNREACH - inscode(d, ds, de, "WSAENETUNREACH", WSAENETUNREACH, "Network is unreachable"); + add_errcode("WSAENETUNREACH", WSAENETUNREACH, "Network is unreachable"); #endif #ifdef WSAEPROCLIM - inscode(d, ds, de, "WSAEPROCLIM", WSAEPROCLIM, "Error WSAEPROCLIM"); + add_errcode("WSAEPROCLIM", WSAEPROCLIM, "Error WSAEPROCLIM"); #endif #ifdef WSAEFAULT - inscode(d, ds, de, "WSAEFAULT", WSAEFAULT, "Bad address"); + add_errcode("WSAEFAULT", WSAEFAULT, "Bad address"); #endif #ifdef WSANOTINITIALISED - inscode(d, ds, de, "WSANOTINITIALISED", WSANOTINITIALISED, "Error WSANOTINITIALISED"); + add_errcode("WSANOTINITIALISED", WSANOTINITIALISED, "Error WSANOTINITIALISED"); #endif #ifdef WSAEUSERS - inscode(d, ds, de, "WSAEUSERS", WSAEUSERS, "Too many users"); + add_errcode("WSAEUSERS", WSAEUSERS, "Too many users"); #endif #ifdef WSAMAKEASYNCREPL - inscode(d, ds, de, "WSAMAKEASYNCREPL", WSAMAKEASYNCREPL, "Error WSAMAKEASYNCREPL"); + add_errcode("WSAMAKEASYNCREPL", WSAMAKEASYNCREPL, "Error WSAMAKEASYNCREPL"); #endif #ifdef WSAENOPROTOOPT - inscode(d, ds, de, "WSAENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); + add_errcode("WSAENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); #endif #ifdef WSAECONNABORTED - inscode(d, ds, de, "WSAECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); + add_errcode("WSAECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); #endif #ifdef WSAENAMETOOLONG - inscode(d, ds, de, "WSAENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); + add_errcode("WSAENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); #endif #ifdef WSAENOTEMPTY - inscode(d, ds, de, "WSAENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); + 
add_errcode("WSAENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); #endif #ifdef WSAESHUTDOWN - inscode(d, ds, de, "WSAESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); + add_errcode("WSAESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); #endif #ifdef WSAEAFNOSUPPORT - inscode(d, ds, de, "WSAEAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); + add_errcode("WSAEAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); #endif #ifdef WSAETOOMANYREFS - inscode(d, ds, de, "WSAETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); + add_errcode("WSAETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); #endif #ifdef WSAEACCES - inscode(d, ds, de, "WSAEACCES", WSAEACCES, "Permission denied"); + add_errcode("WSAEACCES", WSAEACCES, "Permission denied"); #endif #ifdef WSATR - inscode(d, ds, de, "WSATR", WSATR, "Error WSATR"); + add_errcode("WSATR", WSATR, "Error WSATR"); #endif #ifdef WSABASEERR - inscode(d, ds, de, "WSABASEERR", WSABASEERR, "Error WSABASEERR"); + add_errcode("WSABASEERR", WSABASEERR, "Error WSABASEERR"); #endif #ifdef WSADESCRIPTIO - inscode(d, ds, de, "WSADESCRIPTIO", WSADESCRIPTIO, "Error WSADESCRIPTIO"); + add_errcode("WSADESCRIPTIO", WSADESCRIPTIO, "Error WSADESCRIPTIO"); #endif #ifdef WSAEMSGSIZE - inscode(d, ds, de, "WSAEMSGSIZE", WSAEMSGSIZE, "Message too long"); + add_errcode("WSAEMSGSIZE", WSAEMSGSIZE, "Message too long"); #endif #ifdef WSAEBADF - inscode(d, ds, de, "WSAEBADF", WSAEBADF, "Bad file number"); + add_errcode("WSAEBADF", WSAEBADF, "Bad file number"); #endif #ifdef WSAECONNRESET - inscode(d, ds, de, "WSAECONNRESET", WSAECONNRESET, "Connection reset by peer"); + add_errcode("WSAECONNRESET", WSAECONNRESET, "Connection reset by peer"); #endif #ifdef WSAGETSELECTERRO - inscode(d, ds, de, "WSAGETSELECTERRO", WSAGETSELECTERRO, "Error WSAGETSELECTERRO"); + add_errcode("WSAGETSELECTERRO", WSAGETSELECTERRO, "Error 
WSAGETSELECTERRO"); #endif #ifdef WSAETIMEDOUT - inscode(d, ds, de, "WSAETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); + add_errcode("WSAETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); #endif #ifdef WSAENOBUFS - inscode(d, ds, de, "WSAENOBUFS", WSAENOBUFS, "No buffer space available"); + add_errcode("WSAENOBUFS", WSAENOBUFS, "No buffer space available"); #endif #ifdef WSAEDISCON - inscode(d, ds, de, "WSAEDISCON", WSAEDISCON, "Error WSAEDISCON"); + add_errcode("WSAEDISCON", WSAEDISCON, "Error WSAEDISCON"); #endif #ifdef WSAEINTR - inscode(d, ds, de, "WSAEINTR", WSAEINTR, "Interrupted system call"); + add_errcode("WSAEINTR", WSAEINTR, "Interrupted system call"); #endif #ifdef WSAEPROTOTYPE - inscode(d, ds, de, "WSAEPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); + add_errcode("WSAEPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); #endif #ifdef WSAHOS - inscode(d, ds, de, "WSAHOS", WSAHOS, "Error WSAHOS"); + add_errcode("WSAHOS", WSAHOS, "Error WSAHOS"); #endif #ifdef WSAEADDRINUSE - inscode(d, ds, de, "WSAEADDRINUSE", WSAEADDRINUSE, "Address already in use"); + add_errcode("WSAEADDRINUSE", WSAEADDRINUSE, "Address already in use"); #endif #ifdef WSAEADDRNOTAVAIL - inscode(d, ds, de, "WSAEADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); + add_errcode("WSAEADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); #endif #ifdef WSAEALREADY - inscode(d, ds, de, "WSAEALREADY", WSAEALREADY, "Operation already in progress"); + add_errcode("WSAEALREADY", WSAEALREADY, "Operation already in progress"); #endif #ifdef WSAEPROTONOSUPPORT - inscode(d, ds, de, "WSAEPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); + add_errcode("WSAEPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); #endif #ifdef WSASYSNOTREADY - inscode(d, ds, de, "WSASYSNOTREADY", WSASYSNOTREADY, "Error WSASYSNOTREADY"); + add_errcode("WSASYSNOTREADY", WSASYSNOTREADY, "Error WSASYSNOTREADY"); #endif #ifdef WSAEWOULDBLOCK - 
inscode(d, ds, de, "WSAEWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); + add_errcode("WSAEWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); #endif #ifdef WSAEPFNOSUPPORT - inscode(d, ds, de, "WSAEPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); + add_errcode("WSAEPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); #endif #ifdef WSAEOPNOTSUPP - inscode(d, ds, de, "WSAEOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); + add_errcode("WSAEOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); #endif #ifdef WSAEISCONN - inscode(d, ds, de, "WSAEISCONN", WSAEISCONN, "Transport endpoint is already connected"); + add_errcode("WSAEISCONN", WSAEISCONN, "Transport endpoint is already connected"); #endif #ifdef WSAEDQUOT - inscode(d, ds, de, "WSAEDQUOT", WSAEDQUOT, "Quota exceeded"); + add_errcode("WSAEDQUOT", WSAEDQUOT, "Quota exceeded"); #endif #ifdef WSAENOTCONN - inscode(d, ds, de, "WSAENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); + add_errcode("WSAENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); #endif #ifdef WSAEREMOTE - inscode(d, ds, de, "WSAEREMOTE", WSAEREMOTE, "Object is remote"); + add_errcode("WSAEREMOTE", WSAEREMOTE, "Object is remote"); #endif #ifdef WSAEINVAL - inscode(d, ds, de, "WSAEINVAL", WSAEINVAL, "Invalid argument"); + add_errcode("WSAEINVAL", WSAEINVAL, "Invalid argument"); #endif #ifdef WSAEINPROGRESS - inscode(d, ds, de, "WSAEINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); + add_errcode("WSAEINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); #endif #ifdef WSAGETSELECTEVEN - inscode(d, ds, de, "WSAGETSELECTEVEN", WSAGETSELECTEVEN, "Error WSAGETSELECTEVEN"); + add_errcode("WSAGETSELECTEVEN", WSAGETSELECTEVEN, "Error WSAGETSELECTEVEN"); #endif #ifdef WSAESOCKTNOSUPPORT - inscode(d, ds, de, "WSAESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); + add_errcode("WSAESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, 
"Socket type not supported"); #endif #ifdef WSAGETASYNCERRO - inscode(d, ds, de, "WSAGETASYNCERRO", WSAGETASYNCERRO, "Error WSAGETASYNCERRO"); + add_errcode("WSAGETASYNCERRO", WSAGETASYNCERRO, "Error WSAGETASYNCERRO"); #endif #ifdef WSAMAKESELECTREPL - inscode(d, ds, de, "WSAMAKESELECTREPL", WSAMAKESELECTREPL, "Error WSAMAKESELECTREPL"); + add_errcode("WSAMAKESELECTREPL", WSAMAKESELECTREPL, "Error WSAMAKESELECTREPL"); #endif #ifdef WSAGETASYNCBUFLE - inscode(d, ds, de, "WSAGETASYNCBUFLE", WSAGETASYNCBUFLE, "Error WSAGETASYNCBUFLE"); + add_errcode("WSAGETASYNCBUFLE", WSAGETASYNCBUFLE, "Error WSAGETASYNCBUFLE"); #endif #ifdef WSAEDESTADDRREQ - inscode(d, ds, de, "WSAEDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); + add_errcode("WSAEDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); #endif #ifdef WSAECONNREFUSED - inscode(d, ds, de, "WSAECONNREFUSED", WSAECONNREFUSED, "Connection refused"); + add_errcode("WSAECONNREFUSED", WSAECONNREFUSED, "Connection refused"); #endif #ifdef WSAENETRESET - inscode(d, ds, de, "WSAENETRESET", WSAENETRESET, "Network dropped connection because of reset"); + add_errcode("WSAENETRESET", WSAENETRESET, "Network dropped connection because of reset"); #endif #ifdef WSAN - inscode(d, ds, de, "WSAN", WSAN, "Error WSAN"); + add_errcode("WSAN", WSAN, "Error WSAN"); #endif #ifdef ENOMEDIUM - inscode(d, ds, de, "ENOMEDIUM", ENOMEDIUM, "No medium found"); + add_errcode("ENOMEDIUM", ENOMEDIUM, "No medium found"); #endif #ifdef EMEDIUMTYPE - inscode(d, ds, de, "EMEDIUMTYPE", EMEDIUMTYPE, "Wrong medium type"); + add_errcode("EMEDIUMTYPE", EMEDIUMTYPE, "Wrong medium type"); #endif #ifdef ECANCELED - inscode(d, ds, de, "ECANCELED", ECANCELED, "Operation Canceled"); + add_errcode("ECANCELED", ECANCELED, "Operation Canceled"); #endif #ifdef ENOKEY - inscode(d, ds, de, "ENOKEY", ENOKEY, "Required key not available"); + add_errcode("ENOKEY", ENOKEY, "Required key not available"); #endif #ifdef EKEYEXPIRED - inscode(d, ds, de, 
"EKEYEXPIRED", EKEYEXPIRED, "Key has expired"); + add_errcode("EKEYEXPIRED", EKEYEXPIRED, "Key has expired"); #endif #ifdef EKEYREVOKED - inscode(d, ds, de, "EKEYREVOKED", EKEYREVOKED, "Key has been revoked"); + add_errcode("EKEYREVOKED", EKEYREVOKED, "Key has been revoked"); #endif #ifdef EKEYREJECTED - inscode(d, ds, de, "EKEYREJECTED", EKEYREJECTED, "Key was rejected by service"); + add_errcode("EKEYREJECTED", EKEYREJECTED, "Key was rejected by service"); #endif #ifdef EOWNERDEAD - inscode(d, ds, de, "EOWNERDEAD", EOWNERDEAD, "Owner died"); + add_errcode("EOWNERDEAD", EOWNERDEAD, "Owner died"); #endif #ifdef ENOTRECOVERABLE - inscode(d, ds, de, "ENOTRECOVERABLE", ENOTRECOVERABLE, "State not recoverable"); + add_errcode("ENOTRECOVERABLE", ENOTRECOVERABLE, "State not recoverable"); #endif #ifdef ERFKILL - inscode(d, ds, de, "ERFKILL", ERFKILL, "Operation not possible due to RF-kill"); + add_errcode("ERFKILL", ERFKILL, "Operation not possible due to RF-kill"); #endif /* Solaris-specific errnos */ #ifdef ECANCELED - inscode(d, ds, de, "ECANCELED", ECANCELED, "Operation canceled"); + add_errcode("ECANCELED", ECANCELED, "Operation canceled"); #endif #ifdef ENOTSUP - inscode(d, ds, de, "ENOTSUP", ENOTSUP, "Operation not supported"); + add_errcode("ENOTSUP", ENOTSUP, "Operation not supported"); #endif #ifdef EOWNERDEAD - inscode(d, ds, de, "EOWNERDEAD", EOWNERDEAD, "Process died with the lock"); + add_errcode("EOWNERDEAD", EOWNERDEAD, "Process died with the lock"); #endif #ifdef ENOTRECOVERABLE - inscode(d, ds, de, "ENOTRECOVERABLE", ENOTRECOVERABLE, "Lock is not recoverable"); + add_errcode("ENOTRECOVERABLE", ENOTRECOVERABLE, "Lock is not recoverable"); #endif #ifdef ELOCKUNMAPPED - inscode(d, ds, de, "ELOCKUNMAPPED", ELOCKUNMAPPED, "Locked lock was unmapped"); + add_errcode("ELOCKUNMAPPED", ELOCKUNMAPPED, "Locked lock was unmapped"); #endif #ifdef ENOTACTIVE - inscode(d, ds, de, "ENOTACTIVE", ENOTACTIVE, "Facility is not active"); + add_errcode("ENOTACTIVE", 
ENOTACTIVE, "Facility is not active"); #endif /* MacOSX specific errnos */ #ifdef EAUTH - inscode(d, ds, de, "EAUTH", EAUTH, "Authentication error"); + add_errcode("EAUTH", EAUTH, "Authentication error"); #endif #ifdef EBADARCH - inscode(d, ds, de, "EBADARCH", EBADARCH, "Bad CPU type in executable"); + add_errcode("EBADARCH", EBADARCH, "Bad CPU type in executable"); #endif #ifdef EBADEXEC - inscode(d, ds, de, "EBADEXEC", EBADEXEC, "Bad executable (or shared library)"); + add_errcode("EBADEXEC", EBADEXEC, "Bad executable (or shared library)"); #endif #ifdef EBADMACHO - inscode(d, ds, de, "EBADMACHO", EBADMACHO, "Malformed Mach-o file"); + add_errcode("EBADMACHO", EBADMACHO, "Malformed Mach-o file"); #endif #ifdef EBADRPC - inscode(d, ds, de, "EBADRPC", EBADRPC, "RPC struct is bad"); + add_errcode("EBADRPC", EBADRPC, "RPC struct is bad"); #endif #ifdef EDEVERR - inscode(d, ds, de, "EDEVERR", EDEVERR, "Device error"); + add_errcode("EDEVERR", EDEVERR, "Device error"); #endif #ifdef EFTYPE - inscode(d, ds, de, "EFTYPE", EFTYPE, "Inappropriate file type or format"); + add_errcode("EFTYPE", EFTYPE, "Inappropriate file type or format"); #endif #ifdef ENEEDAUTH - inscode(d, ds, de, "ENEEDAUTH", ENEEDAUTH, "Need authenticator"); + add_errcode("ENEEDAUTH", ENEEDAUTH, "Need authenticator"); #endif #ifdef ENOATTR - inscode(d, ds, de, "ENOATTR", ENOATTR, "Attribute not found"); + add_errcode("ENOATTR", ENOATTR, "Attribute not found"); #endif #ifdef ENOPOLICY - inscode(d, ds, de, "ENOPOLICY", ENOPOLICY, "Policy not found"); + add_errcode("ENOPOLICY", ENOPOLICY, "Policy not found"); #endif #ifdef EPROCLIM - inscode(d, ds, de, "EPROCLIM", EPROCLIM, "Too many processes"); + add_errcode("EPROCLIM", EPROCLIM, "Too many processes"); #endif #ifdef EPROCUNAVAIL - inscode(d, ds, de, "EPROCUNAVAIL", EPROCUNAVAIL, "Bad procedure for program"); + add_errcode("EPROCUNAVAIL", EPROCUNAVAIL, "Bad procedure for program"); #endif #ifdef EPROGMISMATCH - inscode(d, ds, de, "EPROGMISMATCH", 
EPROGMISMATCH, "Program version wrong"); + add_errcode("EPROGMISMATCH", EPROGMISMATCH, "Program version wrong"); #endif #ifdef EPROGUNAVAIL - inscode(d, ds, de, "EPROGUNAVAIL", EPROGUNAVAIL, "RPC prog. not avail"); + add_errcode("EPROGUNAVAIL", EPROGUNAVAIL, "RPC prog. not avail"); #endif #ifdef EPWROFF - inscode(d, ds, de, "EPWROFF", EPWROFF, "Device power is off"); + add_errcode("EPWROFF", EPWROFF, "Device power is off"); #endif #ifdef ERPCMISMATCH - inscode(d, ds, de, "ERPCMISMATCH", ERPCMISMATCH, "RPC version wrong"); + add_errcode("ERPCMISMATCH", ERPCMISMATCH, "RPC version wrong"); #endif #ifdef ESHLIBVERS - inscode(d, ds, de, "ESHLIBVERS", ESHLIBVERS, "Shared library version mismatch"); + add_errcode("ESHLIBVERS", ESHLIBVERS, "Shared library version mismatch"); #endif - Py_DECREF(de); - return m; + Py_DECREF(error_dict); + return 0; +} + +static PyModuleDef_Slot errno_slots[] = { + {Py_mod_exec, errno_exec}, + {0, NULL} +}; + +PyDoc_STRVAR(errno__doc__, +"This module makes available standard errno system symbols.\n\ +\n\ +The value of each symbol is the corresponding integer value,\n\ +e.g., on most systems, errno.ENOENT equals the integer 2.\n\ +\n\ +The dictionary errno.errorcode maps numeric codes to symbol names,\n\ +e.g., errno.errorcode[2] could be the string 'ENOENT'.\n\ +\n\ +Symbols that are not relevant to the underlying system are not defined.\n\ +\n\ +To map error codes to error messages, use the function os.strerror(),\n\ +e.g. 
os.strerror(2) could return 'No such file or directory'."); + +static struct PyModuleDef errnomodule = { + PyModuleDef_HEAD_INIT, + .m_name = "errno", + .m_doc = errno__doc__, + .m_size = 0, + .m_methods = errno_methods, + .m_slots = errno_slots, +}; + +PyMODINIT_FUNC +PyInit_errno(void) +{ + return PyModuleDef_Init(&errnomodule); } From 2f37c355ab0e9ec9c1753985d27c41fa0bd719b9 Mon Sep 17 00:00:00 2001 From: Lysandros Nikolaou Date: Thu, 7 May 2020 13:37:51 +0300 Subject: [PATCH 034/115] bpo-40334: Fix error location upon parsing an invalid string literal (GH-19962) When parsing a string with an invalid escape, the old parser used to point to the beginning of the invalid string. This commit changes the new parser to match that behaviour, since it's currently pointing to the end of the string (or to be more precise, to the beginning of the next token). --- Lib/test/test_cmd_line_script.py | 2 +- Lib/test/test_string_literals.py | 7 +++--- Parser/pegen/parse_string.c | 38 ++++++++++++++++++++------------ Parser/pegen/parse_string.h | 4 ++-- Parser/pegen/pegen.c | 10 +++------ Parser/pegen/pegen.h | 1 + 6 files changed, 34 insertions(+), 28 deletions(-) diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index 1fc9500738f352..171340581af228 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -648,7 +648,7 @@ def test_syntaxerror_invalid_escape_sequence_multi_line(self): self.assertEqual( stderr.splitlines()[-3:], [ b' foo = """\\q"""', - b' ^', + b' ^', b'SyntaxError: invalid escape sequence \\q' ], ) diff --git a/Lib/test/test_string_literals.py b/Lib/test/test_string_literals.py index 5b5477d14d467d..9565ee2485afd1 100644 --- a/Lib/test/test_string_literals.py +++ b/Lib/test/test_string_literals.py @@ -118,8 +118,7 @@ def test_eval_str_invalid_escape(self): eval("'''\n\\z'''") self.assertEqual(len(w), 1) self.assertEqual(w[0].filename, '') - if use_old_parser(): - self.assertEqual(w[0].lineno, 1) + 
self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: warnings.simplefilter('error', category=DeprecationWarning) @@ -128,8 +127,8 @@ def test_eval_str_invalid_escape(self): exc = cm.exception self.assertEqual(w, []) self.assertEqual(exc.filename, '') - if use_old_parser(): - self.assertEqual(exc.lineno, 1) + self.assertEqual(exc.lineno, 1) + self.assertEqual(exc.offset, 1) def test_eval_str_raw(self): self.assertEqual(eval(""" r'x' """), 'x') diff --git a/Parser/pegen/parse_string.c b/Parser/pegen/parse_string.c index d96303dc183fa7..ca4b733c153b57 100644 --- a/Parser/pegen/parse_string.c +++ b/Parser/pegen/parse_string.c @@ -12,7 +12,7 @@ // file (like "_PyPegen_raise_syntax_error"). static int -warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char) +warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char, Token *t) { PyObject *msg = PyUnicode_FromFormat("invalid escape sequence \\%c", first_invalid_escape_char); @@ -20,11 +20,16 @@ warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char) return -1; } if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg, p->tok->filename, - p->tok->lineno, NULL, NULL) < 0) { + t->lineno, NULL, NULL) < 0) { if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) { /* Replace the DeprecationWarning exception with a SyntaxError to get a more accurate error report */ PyErr_Clear(); + + /* This is needed, in order for the SyntaxError to point to the token t, + since _PyPegen_raise_error uses p->tokens[p->fill - 1] for the + error location, if p->known_err_token is not set. 
*/ + p->known_err_token = t; RAISE_SYNTAX_ERROR("invalid escape sequence \\%c", first_invalid_escape_char); } Py_DECREF(msg); @@ -47,7 +52,7 @@ decode_utf8(const char **sPtr, const char *end) } static PyObject * -decode_unicode_with_escapes(Parser *parser, const char *s, size_t len) +decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t) { PyObject *v, *u; char *buf; @@ -110,7 +115,7 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len) v = _PyUnicode_DecodeUnicodeEscape(s, len, NULL, &first_invalid_escape); if (v != NULL && first_invalid_escape != NULL) { - if (warn_invalid_escape_sequence(parser, *first_invalid_escape) < 0) { + if (warn_invalid_escape_sequence(parser, *first_invalid_escape, t) < 0) { /* We have not decref u before because first_invalid_escape points inside u. */ Py_XDECREF(u); @@ -123,7 +128,7 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len) } static PyObject * -decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len) +decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len, Token *t) { const char *first_invalid_escape; PyObject *result = _PyBytes_DecodeEscape(s, len, NULL, &first_invalid_escape); @@ -132,7 +137,7 @@ decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len) } if (first_invalid_escape != NULL) { - if (warn_invalid_escape_sequence(p, *first_invalid_escape) < 0) { + if (warn_invalid_escape_sequence(p, *first_invalid_escape, t) < 0) { Py_DECREF(result); return NULL; } @@ -146,9 +151,14 @@ decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len) If the string is an f-string, set *fstr and *fstrlen to the unparsed string object. Return 0 if no errors occurred. 
*/ int -_PyPegen_parsestr(Parser *p, const char *s, int *bytesmode, int *rawmode, PyObject **result, - const char **fstr, Py_ssize_t *fstrlen) +_PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result, + const char **fstr, Py_ssize_t *fstrlen, Token *t) { + const char *s = PyBytes_AsString(t->bytes); + if (s == NULL) { + return -1; + } + size_t len; int quote = Py_CHARMASK(*s); int fmode = 0; @@ -245,7 +255,7 @@ _PyPegen_parsestr(Parser *p, const char *s, int *bytesmode, int *rawmode, PyObje *result = PyBytes_FromStringAndSize(s, len); } else { - *result = decode_bytes_with_escapes(p, s, len); + *result = decode_bytes_with_escapes(p, s, len, t); } } else { @@ -253,7 +263,7 @@ _PyPegen_parsestr(Parser *p, const char *s, int *bytesmode, int *rawmode, PyObje *result = PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL); } else { - *result = decode_unicode_with_escapes(p, s, len); + *result = decode_unicode_with_escapes(p, s, len, t); } } return *result == NULL ? -1 : 0; @@ -637,7 +647,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, */ static int fstring_find_literal(Parser *p, const char **str, const char *end, int raw, - PyObject **literal, int recurse_lvl) + PyObject **literal, int recurse_lvl, Token *t) { /* Get any literal string. 
It ends when we hit an un-doubled left brace (which isn't part of a unicode name escape such as @@ -660,7 +670,7 @@ fstring_find_literal(Parser *p, const char **str, const char *end, int raw, } break; } - if (ch == '{' && warn_invalid_escape_sequence(p, ch) < 0) { + if (ch == '{' && warn_invalid_escape_sequence(p, ch, t) < 0) { return -1; } } @@ -704,7 +714,7 @@ fstring_find_literal(Parser *p, const char **str, const char *end, int raw, NULL, NULL); else *literal = decode_unicode_with_escapes(p, literal_start, - s - literal_start); + s - literal_start, t); if (!*literal) return -1; } @@ -1041,7 +1051,7 @@ fstring_find_literal_and_expr(Parser *p, const char **str, const char *end, int assert(*literal == NULL && *expression == NULL); /* Get any literal string. */ - result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl); + result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl, t); if (result < 0) goto error; diff --git a/Parser/pegen/parse_string.h b/Parser/pegen/parse_string.h index 4f2aa94fc19b05..cd85bd57d0a383 100644 --- a/Parser/pegen/parse_string.h +++ b/Parser/pegen/parse_string.h @@ -34,8 +34,8 @@ typedef struct { } FstringParser; void _PyPegen_FstringParser_Init(FstringParser *); -int _PyPegen_parsestr(Parser *, const char *, int *, int *, PyObject **, - const char **, Py_ssize_t *); +int _PyPegen_parsestr(Parser *, int *, int *, PyObject **, + const char **, Py_ssize_t *, Token *); int _PyPegen_FstringParser_ConcatFstring(Parser *, FstringParser *, const char **, const char *, int, int, Token *, Token *, Token *); diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index c311593af70f58..06af53b3597f74 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -383,7 +383,7 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const ch PyObject *errstr = NULL; PyObject *loc = NULL; PyObject *tmp = NULL; - Token *t = p->tokens[p->fill - 1]; + Token *t = p->known_err_token != NULL ? 
p->known_err_token : p->tokens[p->fill - 1]; Py_ssize_t col_number = !with_col_number; va_list va; p->error_indicator = 1; @@ -1053,6 +1053,7 @@ _PyPegen_Parser_New(struct tok_state *tok, int start_rule, int flags, p->starting_col_offset = 0; p->flags = flags; p->feature_version = feature_version; + p->known_err_token = NULL; return p; } @@ -1972,12 +1973,7 @@ _PyPegen_concatenate_strings(Parser *p, asdl_seq *strings) const char *fstr; Py_ssize_t fstrlen = -1; - char *this_str = PyBytes_AsString(t->bytes); - if (!this_str) { - goto error; - } - - if (_PyPegen_parsestr(p, this_str, &this_bytesmode, &this_rawmode, &s, &fstr, &fstrlen) != 0) { + if (_PyPegen_parsestr(p, &this_bytesmode, &this_rawmode, &s, &fstr, &fstrlen, t) != 0) { goto error; } diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index cbe6f197ac7423..ffb18e47e4a9a8 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -71,6 +71,7 @@ typedef struct { int flags; int feature_version; growable_comment_array type_ignore_comments; + Token *known_err_token; } Parser; typedef struct { From 4638c6429575bd6de26b12b2af5df74d6568b553 Mon Sep 17 00:00:00 2001 From: Lysandros Nikolaou Date: Thu, 7 May 2020 13:44:06 +0300 Subject: [PATCH 035/115] bpo-40334: Error message for invalid default args in function call (GH-19973) When parsing something like `f(g()=2)`, where the name of a default arg is not a NAME, but an arbitrary expression, a specialised error message is emitted. 
--- Grammar/python.gram | 4 + Lib/test/test_exceptions.py | 4 +- Lib/test/test_peg_parser.py | 3 + Parser/pegen/parse.c | 370 +++++++++++++++++++++--------------- 4 files changed, 222 insertions(+), 159 deletions(-) diff --git a/Grammar/python.gram b/Grammar/python.gram index 3d8a39b1d59066..574e1e14216449 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -548,10 +548,12 @@ kwarg_or_starred[KeywordOrStarred*]: | a=NAME '=' b=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) } | a=starred_expression { _PyPegen_keyword_or_starred(p, a, 0) } + | invalid_kwarg kwarg_or_double_starred[KeywordOrStarred*]: | a=NAME '=' b=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) } | '**' a=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(NULL, a, EXTRA)), 1) } + | invalid_kwarg # NOTE: star_targets may contain *bitwise_or, targets may not. star_targets[expr_ty]: @@ -620,6 +622,8 @@ incorrect_arguments: | expression for_if_clauses ',' [args | expression for_if_clauses] { RAISE_SYNTAX_ERROR("Generator expression must be parenthesized") } | a=args ',' args { _PyPegen_arguments_parsing_error(p, a) } +invalid_kwarg: + | expression '=' { RAISE_SYNTAX_ERROR("expression cannot contain assignment, perhaps you meant \"==\"?") } invalid_named_expression: | a=expression ':=' expression { RAISE_SYNTAX_ERROR("cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index d83b73ab340c35..dbd7fa6bdd9385 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -242,16 +242,16 @@ def baz(): check('from __future__ import doesnt_exist', 1, 1) check('from __future__ import braces', 1, 1) check('x=1\nfrom __future__ import division', 2, 1) + check('(yield i) = 2', 1, 1) check('def f(*):\n pass', 1, 7 if support.use_old_parser() else 8) + check('foo(1=2)', 1, 5 if support.use_old_parser() 
else 6) @support.skip_if_new_parser("Pegen column offsets might be different") def testSyntaxErrorOffsetCustom(self): self.check('for 1 in []: pass', 1, 5) self.check('[*x for x in xs]', 1, 2) self.check('def f():\n x, y: int', 2, 3) - self.check('(yield i) = 2', 1, 1) self.check('foo(x for x in range(10), 100)', 1, 5) - self.check('foo(1=2)', 1, 5) @cpython_only def testSettingException(self): diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index d6939fdbf618a6..df2d46d8827f0e 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -609,6 +609,9 @@ def f(): ("lambda *: pass", "named arguments must follow bare *"), ("lambda *,: pass", "named arguments must follow bare *"), ("lambda *, **a: pass", "named arguments must follow bare *"), + ("f(g()=2", "expression cannot contain assignment, perhaps you meant \"==\"?"), + ("f(a, b, *c, d.e=2", "expression cannot contain assignment, perhaps you meant \"==\"?"), + ("f(*a, **b, c=0, d[1]=3)", "expression cannot contain assignment, perhaps you meant \"==\"?"), ] GOOD_BUT_FAIL_TEST_CASES = [ diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index b1da16640aa6e1..3a08abbca581c5 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -210,161 +210,162 @@ static KeywordToken *reserved_keywords[] = { #define t_lookahead_type 1139 #define t_atom_type 1140 #define incorrect_arguments_type 1141 -#define invalid_named_expression_type 1142 -#define invalid_assignment_type 1143 -#define invalid_block_type 1144 -#define invalid_comprehension_type 1145 -#define invalid_parameters_type 1146 -#define invalid_star_etc_type 1147 -#define invalid_lambda_star_etc_type 1148 -#define invalid_double_type_comments_type 1149 -#define _loop0_1_type 1150 -#define _loop0_2_type 1151 -#define _loop0_4_type 1152 -#define _gather_3_type 1153 -#define _loop0_6_type 1154 -#define _gather_5_type 1155 -#define _loop0_8_type 1156 -#define _gather_7_type 1157 -#define _loop0_10_type 1158 -#define 
_gather_9_type 1159 -#define _loop1_11_type 1160 -#define _loop0_13_type 1161 -#define _gather_12_type 1162 -#define _tmp_14_type 1163 -#define _tmp_15_type 1164 -#define _tmp_16_type 1165 -#define _tmp_17_type 1166 -#define _tmp_18_type 1167 -#define _tmp_19_type 1168 -#define _tmp_20_type 1169 -#define _tmp_21_type 1170 -#define _loop1_22_type 1171 -#define _tmp_23_type 1172 -#define _tmp_24_type 1173 -#define _loop0_26_type 1174 -#define _gather_25_type 1175 -#define _loop0_28_type 1176 -#define _gather_27_type 1177 -#define _tmp_29_type 1178 -#define _loop0_30_type 1179 -#define _loop1_31_type 1180 -#define _loop0_33_type 1181 -#define _gather_32_type 1182 -#define _tmp_34_type 1183 -#define _loop0_36_type 1184 -#define _gather_35_type 1185 -#define _tmp_37_type 1186 -#define _loop0_39_type 1187 -#define _gather_38_type 1188 -#define _loop0_41_type 1189 -#define _gather_40_type 1190 -#define _loop0_43_type 1191 -#define _gather_42_type 1192 -#define _loop0_45_type 1193 -#define _gather_44_type 1194 -#define _tmp_46_type 1195 -#define _loop1_47_type 1196 -#define _tmp_48_type 1197 -#define _tmp_49_type 1198 -#define _tmp_50_type 1199 -#define _tmp_51_type 1200 -#define _tmp_52_type 1201 -#define _loop0_53_type 1202 -#define _loop0_54_type 1203 -#define _loop0_55_type 1204 -#define _loop1_56_type 1205 -#define _loop0_57_type 1206 -#define _loop1_58_type 1207 -#define _loop1_59_type 1208 -#define _loop1_60_type 1209 -#define _loop0_61_type 1210 -#define _loop1_62_type 1211 -#define _loop0_63_type 1212 -#define _loop1_64_type 1213 -#define _loop0_65_type 1214 -#define _loop1_66_type 1215 -#define _loop1_67_type 1216 -#define _tmp_68_type 1217 -#define _loop0_70_type 1218 -#define _gather_69_type 1219 -#define _loop1_71_type 1220 -#define _loop0_73_type 1221 -#define _gather_72_type 1222 -#define _loop1_74_type 1223 -#define _loop0_75_type 1224 -#define _loop0_76_type 1225 -#define _loop0_77_type 1226 -#define _loop1_78_type 1227 -#define _loop0_79_type 1228 
-#define _loop1_80_type 1229 -#define _loop1_81_type 1230 -#define _loop1_82_type 1231 -#define _loop0_83_type 1232 -#define _loop1_84_type 1233 -#define _loop0_85_type 1234 -#define _loop1_86_type 1235 -#define _loop0_87_type 1236 -#define _loop1_88_type 1237 -#define _loop1_89_type 1238 -#define _loop1_90_type 1239 -#define _loop1_91_type 1240 -#define _tmp_92_type 1241 -#define _loop0_94_type 1242 -#define _gather_93_type 1243 -#define _tmp_95_type 1244 -#define _tmp_96_type 1245 -#define _tmp_97_type 1246 -#define _tmp_98_type 1247 -#define _loop1_99_type 1248 -#define _tmp_100_type 1249 -#define _tmp_101_type 1250 -#define _loop0_103_type 1251 -#define _gather_102_type 1252 -#define _loop1_104_type 1253 -#define _loop0_105_type 1254 -#define _loop0_106_type 1255 -#define _tmp_107_type 1256 -#define _tmp_108_type 1257 -#define _loop0_110_type 1258 -#define _gather_109_type 1259 -#define _loop0_112_type 1260 -#define _gather_111_type 1261 -#define _loop0_114_type 1262 -#define _gather_113_type 1263 -#define _loop0_116_type 1264 -#define _gather_115_type 1265 -#define _loop0_117_type 1266 -#define _loop0_119_type 1267 -#define _gather_118_type 1268 -#define _tmp_120_type 1269 -#define _loop0_122_type 1270 -#define _gather_121_type 1271 -#define _loop0_124_type 1272 -#define _gather_123_type 1273 -#define _tmp_125_type 1274 -#define _tmp_126_type 1275 -#define _tmp_127_type 1276 -#define _tmp_128_type 1277 -#define _tmp_129_type 1278 -#define _loop0_130_type 1279 -#define _tmp_131_type 1280 -#define _tmp_132_type 1281 -#define _tmp_133_type 1282 -#define _tmp_134_type 1283 -#define _tmp_135_type 1284 -#define _tmp_136_type 1285 -#define _tmp_137_type 1286 -#define _tmp_138_type 1287 -#define _tmp_139_type 1288 -#define _tmp_140_type 1289 -#define _tmp_141_type 1290 -#define _tmp_142_type 1291 -#define _tmp_143_type 1292 -#define _tmp_144_type 1293 -#define _loop1_145_type 1294 -#define _tmp_146_type 1295 -#define _tmp_147_type 1296 +#define invalid_kwarg_type 1142 
+#define invalid_named_expression_type 1143 +#define invalid_assignment_type 1144 +#define invalid_block_type 1145 +#define invalid_comprehension_type 1146 +#define invalid_parameters_type 1147 +#define invalid_star_etc_type 1148 +#define invalid_lambda_star_etc_type 1149 +#define invalid_double_type_comments_type 1150 +#define _loop0_1_type 1151 +#define _loop0_2_type 1152 +#define _loop0_4_type 1153 +#define _gather_3_type 1154 +#define _loop0_6_type 1155 +#define _gather_5_type 1156 +#define _loop0_8_type 1157 +#define _gather_7_type 1158 +#define _loop0_10_type 1159 +#define _gather_9_type 1160 +#define _loop1_11_type 1161 +#define _loop0_13_type 1162 +#define _gather_12_type 1163 +#define _tmp_14_type 1164 +#define _tmp_15_type 1165 +#define _tmp_16_type 1166 +#define _tmp_17_type 1167 +#define _tmp_18_type 1168 +#define _tmp_19_type 1169 +#define _tmp_20_type 1170 +#define _tmp_21_type 1171 +#define _loop1_22_type 1172 +#define _tmp_23_type 1173 +#define _tmp_24_type 1174 +#define _loop0_26_type 1175 +#define _gather_25_type 1176 +#define _loop0_28_type 1177 +#define _gather_27_type 1178 +#define _tmp_29_type 1179 +#define _loop0_30_type 1180 +#define _loop1_31_type 1181 +#define _loop0_33_type 1182 +#define _gather_32_type 1183 +#define _tmp_34_type 1184 +#define _loop0_36_type 1185 +#define _gather_35_type 1186 +#define _tmp_37_type 1187 +#define _loop0_39_type 1188 +#define _gather_38_type 1189 +#define _loop0_41_type 1190 +#define _gather_40_type 1191 +#define _loop0_43_type 1192 +#define _gather_42_type 1193 +#define _loop0_45_type 1194 +#define _gather_44_type 1195 +#define _tmp_46_type 1196 +#define _loop1_47_type 1197 +#define _tmp_48_type 1198 +#define _tmp_49_type 1199 +#define _tmp_50_type 1200 +#define _tmp_51_type 1201 +#define _tmp_52_type 1202 +#define _loop0_53_type 1203 +#define _loop0_54_type 1204 +#define _loop0_55_type 1205 +#define _loop1_56_type 1206 +#define _loop0_57_type 1207 +#define _loop1_58_type 1208 +#define _loop1_59_type 1209 
+#define _loop1_60_type 1210 +#define _loop0_61_type 1211 +#define _loop1_62_type 1212 +#define _loop0_63_type 1213 +#define _loop1_64_type 1214 +#define _loop0_65_type 1215 +#define _loop1_66_type 1216 +#define _loop1_67_type 1217 +#define _tmp_68_type 1218 +#define _loop0_70_type 1219 +#define _gather_69_type 1220 +#define _loop1_71_type 1221 +#define _loop0_73_type 1222 +#define _gather_72_type 1223 +#define _loop1_74_type 1224 +#define _loop0_75_type 1225 +#define _loop0_76_type 1226 +#define _loop0_77_type 1227 +#define _loop1_78_type 1228 +#define _loop0_79_type 1229 +#define _loop1_80_type 1230 +#define _loop1_81_type 1231 +#define _loop1_82_type 1232 +#define _loop0_83_type 1233 +#define _loop1_84_type 1234 +#define _loop0_85_type 1235 +#define _loop1_86_type 1236 +#define _loop0_87_type 1237 +#define _loop1_88_type 1238 +#define _loop1_89_type 1239 +#define _loop1_90_type 1240 +#define _loop1_91_type 1241 +#define _tmp_92_type 1242 +#define _loop0_94_type 1243 +#define _gather_93_type 1244 +#define _tmp_95_type 1245 +#define _tmp_96_type 1246 +#define _tmp_97_type 1247 +#define _tmp_98_type 1248 +#define _loop1_99_type 1249 +#define _tmp_100_type 1250 +#define _tmp_101_type 1251 +#define _loop0_103_type 1252 +#define _gather_102_type 1253 +#define _loop1_104_type 1254 +#define _loop0_105_type 1255 +#define _loop0_106_type 1256 +#define _tmp_107_type 1257 +#define _tmp_108_type 1258 +#define _loop0_110_type 1259 +#define _gather_109_type 1260 +#define _loop0_112_type 1261 +#define _gather_111_type 1262 +#define _loop0_114_type 1263 +#define _gather_113_type 1264 +#define _loop0_116_type 1265 +#define _gather_115_type 1266 +#define _loop0_117_type 1267 +#define _loop0_119_type 1268 +#define _gather_118_type 1269 +#define _tmp_120_type 1270 +#define _loop0_122_type 1271 +#define _gather_121_type 1272 +#define _loop0_124_type 1273 +#define _gather_123_type 1274 +#define _tmp_125_type 1275 +#define _tmp_126_type 1276 +#define _tmp_127_type 1277 +#define 
_tmp_128_type 1278 +#define _tmp_129_type 1279 +#define _loop0_130_type 1280 +#define _tmp_131_type 1281 +#define _tmp_132_type 1282 +#define _tmp_133_type 1283 +#define _tmp_134_type 1284 +#define _tmp_135_type 1285 +#define _tmp_136_type 1286 +#define _tmp_137_type 1287 +#define _tmp_138_type 1288 +#define _tmp_139_type 1289 +#define _tmp_140_type 1290 +#define _tmp_141_type 1291 +#define _tmp_142_type 1292 +#define _tmp_143_type 1293 +#define _tmp_144_type 1294 +#define _loop1_145_type 1295 +#define _tmp_146_type 1296 +#define _tmp_147_type 1297 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -508,6 +509,7 @@ static expr_ty t_primary_rule(Parser *p); static void *t_lookahead_rule(Parser *p); static expr_ty t_atom_rule(Parser *p); static void *incorrect_arguments_rule(Parser *p); +static void *invalid_kwarg_rule(Parser *p); static void *invalid_named_expression_rule(Parser *p); static void *invalid_assignment_rule(Parser *p); static void *invalid_block_rule(Parser *p); @@ -9079,7 +9081,7 @@ starred_expression_rule(Parser *p) return res; } -// kwarg_or_starred: NAME '=' expression | starred_expression +// kwarg_or_starred: NAME '=' expression | starred_expression | invalid_kwarg static KeywordOrStarred* kwarg_or_starred_rule(Parser *p) { @@ -9140,12 +9142,23 @@ kwarg_or_starred_rule(Parser *p) } p->mark = mark; } + { // invalid_kwarg + void *invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg + ) + { + res = invalid_kwarg_var; + goto done; + } + p->mark = mark; + } res = NULL; done: return res; } -// kwarg_or_double_starred: NAME '=' expression | '**' expression +// kwarg_or_double_starred: NAME '=' expression | '**' expression | invalid_kwarg static KeywordOrStarred* kwarg_or_double_starred_rule(Parser *p) { @@ -9217,6 +9230,17 @@ kwarg_or_double_starred_rule(Parser *p) } p->mark = mark; } + { // invalid_kwarg + void *invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // 
invalid_kwarg + ) + { + res = invalid_kwarg_var; + goto done; + } + p->mark = mark; + } res = NULL; done: return res; @@ -10561,6 +10585,38 @@ incorrect_arguments_rule(Parser *p) return res; } +// invalid_kwarg: expression '=' +static void * +invalid_kwarg_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * res = NULL; + int mark = p->mark; + { // expression '=' + expr_ty expression_var; + Token * literal; + if ( + (expression_var = expression_rule(p)) // expression + && + (literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + res = RAISE_SYNTAX_ERROR ( "expression cannot contain assignment, perhaps you meant \"==\"?" ); + if (res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = mark; + } + res = NULL; + done: + return res; +} + // invalid_named_expression: expression ':=' expression static void * invalid_named_expression_rule(Parser *p) From e1becf46b4e3ba6d7d32ebf4bbd3e0804766a423 Mon Sep 17 00:00:00 2001 From: Petr Viktorin Date: Thu, 7 May 2020 15:39:59 +0200 Subject: [PATCH 036/115] bpo-38787: C API for module state access from extension methods (PEP 573) (GH-19936) Module C state is now accessible from C-defined heap type methods (PEP 573). Patch by Marcel Plch and Petr Viktorin. 
Co-authored-by: Marcel Plch Co-authored-by: Victor Stinner --- Doc/c-api/structures.rst | 50 +++- Doc/c-api/type.rst | 36 ++- Include/cpython/methodobject.h | 32 +++ Include/cpython/object.h | 1 + Include/methodobject.h | 48 ++-- Include/object.h | 5 + Lib/test/test_capi.py | 73 ++++++ Lib/test/test_sys.py | 2 +- Makefile.pre.in | 1 + .../2020-01-22-12-38-59.bpo-38787.HUH6hd.rst | 2 + Modules/_testmultiphase.c | 235 +++++++++++++++++- Modules/clinic/_testmultiphase.c.h | 101 ++++++++ Objects/descrobject.c | 47 +++- Objects/methodobject.c | 83 ++++++- Objects/object.c | 1 + Objects/typeobject.c | 63 ++++- PCbuild/pythoncore.vcxproj | 1 + PCbuild/pythoncore.vcxproj.filters | 3 + Tools/clinic/clinic.py | 64 ++++- 19 files changed, 797 insertions(+), 51 deletions(-) create mode 100644 Include/cpython/methodobject.h create mode 100644 Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst create mode 100644 Modules/clinic/_testmultiphase.c.h diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index fc3467bee4d3cf..72c94459295c41 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -147,23 +147,56 @@ Implementing functions and methods value of the function as exposed in Python. The function must return a new reference. + The function signature is:: + + PyObject *PyCFunction(PyObject *self, + PyObject *const *args); .. c:type:: PyCFunctionWithKeywords Type of the functions used to implement Python callables in C with signature :const:`METH_VARARGS | METH_KEYWORDS`. + The function signature is:: + + PyObject *PyCFunctionWithKeywords(PyObject *self, + PyObject *const *args, + PyObject *kwargs); .. c:type:: _PyCFunctionFast Type of the functions used to implement Python callables in C with signature :const:`METH_FASTCALL`. + The function signature is:: + PyObject *_PyCFunctionFast(PyObject *self, + PyObject *const *args, + Py_ssize_t nargs); .. 
c:type:: _PyCFunctionFastWithKeywords Type of the functions used to implement Python callables in C with signature :const:`METH_FASTCALL | METH_KEYWORDS`. + The function signature is:: + + PyObject *_PyCFunctionFastWithKeywords(PyObject *self, + PyObject *const *args, + Py_ssize_t nargs, + PyObject *kwnames); + +.. c:type:: PyCMethod + + Type of the functions used to implement Python callables in C + with signature :const:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS`. + The function signature is:: + + PyObject *PyCMethod(PyObject *self, + PyTypeObject *defining_class, + PyObject *const *args, + Py_ssize_t nargs, + PyObject *kwnames) + + .. versionadded:: 3.9 .. c:type:: PyMethodDef @@ -197,9 +230,7 @@ The :attr:`ml_flags` field is a bitfield which can include the following flags. The individual flags indicate either a calling convention or a binding convention. -There are four basic calling conventions for positional arguments -and two of them can be combined with :const:`METH_KEYWORDS` to support -also keyword arguments. So there are a total of 6 calling conventions: +There are these calling conventions: .. data:: METH_VARARGS @@ -250,6 +281,19 @@ also keyword arguments. So there are a total of 6 calling conventions: .. versionadded:: 3.7 +.. data:: METH_METHOD | METH_FASTCALL | METH_KEYWORDS + + Extension of :const:`METH_FASTCALL | METH_KEYWORDS` supporting the *defining + class*, that is, the class that contains the method in question. + The defining class might be a superclass of ``Py_TYPE(self)``. + + The method needs to be of type :c:type:`PyCMethod`, the same as for + ``METH_FASTCALL | METH_KEYWORDS`` with ``defining_class`` argument added after + ``self``. + + .. versionadded:: 3.9 + + .. 
data:: METH_NOARGS Methods without parameters don't need to check whether arguments are given if diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index f774ca35edab92..7dd393f47f1b4f 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -109,6 +109,30 @@ Type Objects .. versionadded:: 3.4 +.. c:function:: PyObject* PyType_GetModule(PyTypeObject *type) + + Return the module object associated with the given type when the type was + created using :c:func:`PyType_FromModuleAndSpec`. + + If no module is associated with the given type, sets :py:class:`TypeError` + and returns ``NULL``. + + .. versionadded:: 3.9 + +.. c:function:: void* PyType_GetModuleState(PyTypeObject *type) + + Return the state of the module object associated with the given type. + This is a shortcut for calling :c:func:`PyModule_GetState()` on the result + of :c:func:`PyType_GetModule`. + + If no module is associated with the given type, sets :py:class:`TypeError` + and returns ``NULL``. + + If the *type* has an associated module but its state is ``NULL``, + returns ``NULL`` without setting an exception. + + .. versionadded:: 3.9 + Creating Heap-Allocated Types ............................. @@ -116,7 +140,7 @@ Creating Heap-Allocated Types The following functions and structs are used to create :ref:`heap types `. -.. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) +.. c:function:: PyObject* PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) Creates and returns a heap type object from the *spec* (:const:`Py_TPFLAGS_HEAPTYPE`). @@ -127,8 +151,18 @@ The following functions and structs are used to create If *bases* is ``NULL``, the *Py_tp_base* slot is used instead. If that also is ``NULL``, the new type derives from :class:`object`. + The *module* must be a module object or ``NULL``. + If not ``NULL``, the module is associated with the new type and can later be + retreived with :c:func:`PyType_GetModule`. 
+ This function calls :c:func:`PyType_Ready` on the new type. + .. versionadded:: 3.9 + +.. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) + + Equivalent to ``PyType_FromModuleAndSpec(NULL, spec, bases)``. + .. versionadded:: 3.3 .. c:function:: PyObject* PyType_FromSpec(PyType_Spec *spec) diff --git a/Include/cpython/methodobject.h b/Include/cpython/methodobject.h new file mode 100644 index 00000000000000..2ac2cbf36aa796 --- /dev/null +++ b/Include/cpython/methodobject.h @@ -0,0 +1,32 @@ +#ifndef Py_CPYTHON_METHODOBJECT_H +# error "this header file must not be included directly" +#endif + +PyAPI_DATA(PyTypeObject) PyCMethod_Type; + +/* Macros for direct access to these values. Type checks are *not* + done, so use with care. */ +#define PyCFunction_GET_FUNCTION(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_meth) +#define PyCFunction_GET_SELF(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \ + NULL : ((PyCFunctionObject *)func) -> m_self) +#define PyCFunction_GET_FLAGS(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_flags) +#define PyCFunction_GET_CLASS(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_METHOD ? 
\ + ((PyCMethodObject *)func) -> mm_class : NULL) + +typedef struct { + PyObject_HEAD + PyMethodDef *m_ml; /* Description of the C function to call */ + PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */ + PyObject *m_module; /* The __module__ attribute, can be anything */ + PyObject *m_weakreflist; /* List of weak references */ + vectorcallfunc vectorcall; +} PyCFunctionObject; + +typedef struct { + PyCFunctionObject func; + PyTypeObject *mm_class; /* Class that defines this method */ +} PyCMethodObject; diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 45da752ed2e941..8bf05a32711835 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -289,6 +289,7 @@ typedef struct _heaptypeobject { PyBufferProcs as_buffer; PyObject *ht_name, *ht_slots, *ht_qualname; struct _dictkeysobject *ht_cached_keys; + PyObject *ht_module; /* here are optional user slots, followed by the members. */ } PyHeapTypeObject; diff --git a/Include/methodobject.h b/Include/methodobject.h index adb2d9e884fbb0..7c7362cded35b8 100644 --- a/Include/methodobject.h +++ b/Include/methodobject.h @@ -13,7 +13,7 @@ extern "C" { PyAPI_DATA(PyTypeObject) PyCFunction_Type; -#define PyCFunction_Check(op) Py_IS_TYPE(op, &PyCFunction_Type) +#define PyCFunction_Check(op) (Py_IS_TYPE(op, &PyCFunction_Type) || (PyType_IsSubtype(Py_TYPE(op), &PyCFunction_Type))) typedef PyObject *(*PyCFunction)(PyObject *, PyObject *); typedef PyObject *(*_PyCFunctionFast) (PyObject *, PyObject *const *, Py_ssize_t); @@ -22,21 +22,13 @@ typedef PyObject *(*PyCFunctionWithKeywords)(PyObject *, PyObject *, typedef PyObject *(*_PyCFunctionFastWithKeywords) (PyObject *, PyObject *const *, Py_ssize_t, PyObject *); +typedef PyObject *(*PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, + size_t, PyObject *); + PyAPI_FUNC(PyCFunction) PyCFunction_GetFunction(PyObject *); PyAPI_FUNC(PyObject *) PyCFunction_GetSelf(PyObject *); PyAPI_FUNC(int) PyCFunction_GetFlags(PyObject *); 
-/* Macros for direct access to these values. Type checks are *not* - done, so use with care. */ -#ifndef Py_LIMITED_API -#define PyCFunction_GET_FUNCTION(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_meth) -#define PyCFunction_GET_SELF(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \ - NULL : ((PyCFunctionObject *)func) -> m_self) -#define PyCFunction_GET_FLAGS(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_flags) -#endif Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyCFunction_Call(PyObject *, PyObject *, PyObject *); struct PyMethodDef { @@ -52,6 +44,13 @@ typedef struct PyMethodDef PyMethodDef; PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, PyObject *); +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000 +#define PyCFunction_NewEx(ML, SELF, MOD) PyCMethod_New((ML), (SELF), (MOD), NULL) +PyAPI_FUNC(PyObject *) PyCMethod_New(PyMethodDef *, PyObject *, + PyObject *, PyTypeObject *); +#endif + + /* Flag passed to newmethodobject */ /* #define METH_OLDARGS 0x0000 -- unsupported now */ #define METH_VARARGS 0x0001 @@ -84,15 +83,24 @@ PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, #define METH_STACKLESS 0x0000 #endif +/* METH_METHOD means the function stores an + * additional reference to the class that defines it; + * both self and class are passed to it. + * It uses PyCMethodObject instead of PyCFunctionObject. + * May not be combined with METH_NOARGS, METH_O, METH_CLASS or METH_STATIC. 
+ */ + +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000 +#define METH_METHOD 0x0200 +#endif + + #ifndef Py_LIMITED_API -typedef struct { - PyObject_HEAD - PyMethodDef *m_ml; /* Description of the C function to call */ - PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */ - PyObject *m_module; /* The __module__ attribute, can be anything */ - PyObject *m_weakreflist; /* List of weak references */ - vectorcallfunc vectorcall; -} PyCFunctionObject; + +#define Py_CPYTHON_METHODOBJECT_H +#include "cpython/methodobject.h" +#undef Py_CPYTHON_METHODOBJECT_H + #endif #ifdef __cplusplus diff --git a/Include/object.h b/Include/object.h index 6c30809124dea8..514d934196f571 100644 --- a/Include/object.h +++ b/Include/object.h @@ -213,6 +213,11 @@ PyAPI_FUNC(PyObject*) PyType_FromSpecWithBases(PyType_Spec*, PyObject*); #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03040000 PyAPI_FUNC(void*) PyType_GetSlot(PyTypeObject*, int); #endif +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000 +PyAPI_FUNC(PyObject*) PyType_FromModuleAndSpec(PyObject *, PyType_Spec *, PyObject *); +PyAPI_FUNC(PyObject *) PyType_GetModule(struct _typeobject *); +PyAPI_FUNC(void *) PyType_GetModuleState(struct _typeobject *); +#endif /* Generic type check */ PyAPI_FUNC(int) PyType_IsSubtype(PyTypeObject *, PyTypeObject *); diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index f9578d3afa81f3..5c7526aa7ec29a 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -13,6 +13,8 @@ import time import unittest import weakref +import importlib.machinery +import importlib.util from test import support from test.support import MISSING_C_DOCSTRINGS from test.support.script_helper import assert_python_failure, assert_python_ok @@ -774,5 +776,76 @@ class PyMemDefaultTests(PyMemDebugTests): PYTHONMALLOC = '' +class Test_ModuleStateAccess(unittest.TestCase): + """Test access to module start (PEP 573)""" + + # The C part of the tests lives in 
_testmultiphase, in a module called + # _testmultiphase_meth_state_access. + # This module has multi-phase initialization, unlike _testcapi. + + def setUp(self): + fullname = '_testmultiphase_meth_state_access' # XXX + origin = importlib.util.find_spec('_testmultiphase').origin + loader = importlib.machinery.ExtensionFileLoader(fullname, origin) + spec = importlib.util.spec_from_loader(fullname, loader) + module = importlib.util.module_from_spec(spec) + loader.exec_module(module) + self.module = module + + def test_subclass_get_module(self): + """PyType_GetModule for defining_class""" + class StateAccessType_Subclass(self.module.StateAccessType): + pass + + instance = StateAccessType_Subclass() + self.assertIs(instance.get_defining_module(), self.module) + + def test_subclass_get_module_with_super(self): + class StateAccessType_Subclass(self.module.StateAccessType): + def get_defining_module(self): + return super().get_defining_module() + + instance = StateAccessType_Subclass() + self.assertIs(instance.get_defining_module(), self.module) + + def test_state_access(self): + """Checks methods defined with and without argument clinic + + This tests a no-arg method (get_count) and a method with + both a positional and keyword argument. 
+ """ + + a = self.module.StateAccessType() + b = self.module.StateAccessType() + + methods = { + 'clinic': a.increment_count_clinic, + 'noclinic': a.increment_count_noclinic, + } + + for name, increment_count in methods.items(): + with self.subTest(name): + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 0) + + increment_count() + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 1) + + increment_count(3) + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 4) + + increment_count(-2, twice=True) + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 0) + + with self.assertRaises(TypeError): + increment_count(thrice=3) + + with self.assertRaises(TypeError): + increment_count(1, 2, 3) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 91a645b460ec02..33b34593a0af97 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1322,7 +1322,7 @@ def delx(self): del self.__x '3P' # PyMappingMethods '10P' # PySequenceMethods '2P' # PyBufferProcs - '4P') + '5P') class newstyleclass(object): pass # Separate block for PyDictKeysObject with 8 keys and 5 entries check(newstyleclass, s + calcsize("2nP2n0P") + 8 + 5*calcsize("n2P")) diff --git a/Makefile.pre.in b/Makefile.pre.in index 3cb8b84157f0ed..0d616d304484ce 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1104,6 +1104,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/initconfig.h \ $(srcdir)/Include/cpython/interpreteridobject.h \ $(srcdir)/Include/cpython/listobject.h \ + $(srcdir)/Include/cpython/methodobject.h \ $(srcdir)/Include/cpython/object.h \ $(srcdir)/Include/cpython/objimpl.h \ $(srcdir)/Include/cpython/pyerrors.h \ diff --git a/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst b/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst new file mode 100644 index 00000000000000..785ea323c316de --- 
/dev/null +++ b/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst @@ -0,0 +1,2 @@ +Module C state is now accessible from C-defined heap type methods (:pep:`573`). +Patch by Marcel Plch and Petr Viktorin. diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index eadc46fbf18675..3084fc12a5ef51 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -4,6 +4,19 @@ #include "Python.h" +/* State for testing module state access from methods */ + +typedef struct { + int counter; +} meth_state; + +/*[clinic input] +module _testmultiphase + +class _testmultiphase.StateAccessType "StateAccessTypeObject *" "!StateAccessType" +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=bab9f2fe3bd312ff]*/ + /* Example objects */ typedef struct { PyObject_HEAD @@ -14,6 +27,10 @@ typedef struct { PyObject *integer; } testmultiphase_state; +typedef struct { + PyObject_HEAD +} StateAccessTypeObject; + /* Example methods */ static int @@ -42,6 +59,7 @@ Example_demo(ExampleObject *self, PyObject *args) Py_RETURN_NONE; } +#include "clinic/_testmultiphase.c.h" static PyMethodDef Example_methods[] = { {"demo", (PyCFunction)Example_demo, METH_VARARGS, @@ -102,6 +120,150 @@ static PyType_Spec Example_Type_spec = { Example_Type_slots }; + +/*[clinic input] +_testmultiphase.StateAccessType.get_defining_module + + cls: defining_class + +Return the module of the defining class. 
+[clinic start generated code]*/ + +static PyObject * +_testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject *self, + PyTypeObject *cls) +/*[clinic end generated code: output=ba2a14284a5d0921 input=946149f91cf72c0d]*/ +{ + PyObject *retval; + retval = PyType_GetModule(cls); + if (retval == NULL) { + return NULL; + } + Py_INCREF(retval); + return retval; +} + +/*[clinic input] +_testmultiphase.StateAccessType.increment_count_clinic + + cls: defining_class + / + n: int = 1 + * + twice: bool = False + +Add 'n' from the module-state counter. + +Pass 'twice' to double that amount. + +This tests Argument Clinic support for defining_class. +[clinic start generated code]*/ + +static PyObject * +_testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObject *self, + PyTypeObject *cls, + int n, int twice) +/*[clinic end generated code: output=3b34f86bc5473204 input=551d482e1fe0b8f5]*/ +{ + meth_state *m_state = PyType_GetModuleState(cls); + if (twice) { + n *= 2; + } + m_state->counter += n; + + Py_RETURN_NONE; +} + +PyDoc_STRVAR(_StateAccessType_decrement_count__doc__, +"decrement_count($self, /, n=1, *, twice=None)\n" +"--\n" +"\n" +"Add 'n' from the module-state counter.\n" +"Pass 'twice' to double that amount.\n" +"(This is to test both positional and keyword arguments."); + +// Intentionally does not use Argument Clinic +static PyObject * +_StateAccessType_increment_count_noclinic(StateAccessTypeObject *self, + PyTypeObject *defining_class, + PyObject *const *args, + Py_ssize_t nargs, + PyObject *kwnames) +{ + if (!_PyArg_CheckPositional("StateAccessTypeObject.decrement_count", nargs, 0, 1)) { + return NULL; + } + long n = 1; + if (nargs) { + n = PyLong_AsLong(args[0]); + if (PyErr_Occurred()) { + return NULL; + } + } + if (kwnames && PyTuple_Check(kwnames)) { + if (PyTuple_GET_SIZE(kwnames) > 1 || + PyUnicode_CompareWithASCIIString( + PyTuple_GET_ITEM(kwnames, 0), + "twice" + )) { + PyErr_SetString( + PyExc_TypeError, + 
"decrement_count only takes 'twice' keyword argument" + ); + return NULL; + } + n *= 2; + } + meth_state *m_state = PyType_GetModuleState(defining_class); + m_state->counter += n; + + Py_RETURN_NONE; +} + +/*[clinic input] +_testmultiphase.StateAccessType.get_count + + cls: defining_class + +Return the value of the module-state counter. +[clinic start generated code]*/ + +static PyObject * +_testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self, + PyTypeObject *cls) +/*[clinic end generated code: output=64600f95b499a319 input=d5d181f12384849f]*/ +{ + meth_state *m_state = PyType_GetModuleState(cls); + return PyLong_FromLong(m_state->counter); +} + +static PyMethodDef StateAccessType_methods[] = { + _TESTMULTIPHASE_STATEACCESSTYPE_GET_DEFINING_MODULE_METHODDEF + _TESTMULTIPHASE_STATEACCESSTYPE_GET_COUNT_METHODDEF + _TESTMULTIPHASE_STATEACCESSTYPE_INCREMENT_COUNT_CLINIC_METHODDEF + { + "increment_count_noclinic", + (PyCFunction)(void(*)(void))_StateAccessType_increment_count_noclinic, + METH_METHOD|METH_FASTCALL|METH_KEYWORDS, + _StateAccessType_decrement_count__doc__ + }, + {NULL, NULL} /* sentinel */ +}; + +static PyType_Slot StateAccessType_Type_slots[] = { + {Py_tp_doc, "Type for testing per-module state access from methods."}, + {Py_tp_methods, StateAccessType_methods}, + {0, NULL} +}; + +static PyType_Spec StateAccessType_spec = { + "_testimportexec.StateAccessType", + sizeof(StateAccessTypeObject), + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_FINALIZE | Py_TPFLAGS_BASETYPE, + StateAccessType_Type_slots +}; + /* Function of two integers returning integer */ PyDoc_STRVAR(testexport_foo_doc, @@ -193,30 +355,39 @@ static int execfunc(PyObject *m) /* Add a custom type */ temp = PyType_FromSpec(&Example_Type_spec); - if (temp == NULL) + if (temp == NULL) { goto fail; - if (PyModule_AddObject(m, "Example", temp) != 0) + } + if (PyModule_AddObject(m, "Example", temp) != 0) { goto fail; + } + /* Add an exception type */ temp = 
PyErr_NewException("_testimportexec.error", NULL, NULL); - if (temp == NULL) + if (temp == NULL) { goto fail; - if (PyModule_AddObject(m, "error", temp) != 0) + } + if (PyModule_AddObject(m, "error", temp) != 0) { goto fail; + } /* Add Str */ temp = PyType_FromSpec(&Str_Type_spec); - if (temp == NULL) + if (temp == NULL) { goto fail; - if (PyModule_AddObject(m, "Str", temp) != 0) + } + if (PyModule_AddObject(m, "Str", temp) != 0) { goto fail; + } - if (PyModule_AddIntConstant(m, "int_const", 1969) != 0) + if (PyModule_AddIntConstant(m, "int_const", 1969) != 0) { goto fail; + } - if (PyModule_AddStringConstant(m, "str_const", "something different") != 0) + if (PyModule_AddStringConstant(m, "str_const", "something different") != 0) { goto fail; + } return 0; fail: @@ -620,6 +791,54 @@ PyInit__testmultiphase_exec_unreported_exception(PyObject *spec) return PyModuleDef_Init(&def_exec_unreported_exception); } +static int +meth_state_access_exec(PyObject *m) +{ + PyObject *temp; + meth_state *m_state; + + m_state = PyModule_GetState(m); + if (m_state == NULL) { + return -1; + } + + temp = PyType_FromModuleAndSpec(m, &StateAccessType_spec, NULL); + if (temp == NULL) { + return -1; + } + if (PyModule_AddObject(m, "StateAccessType", temp) != 0) { + return -1; + } + + + return 0; +} + +static PyModuleDef_Slot meth_state_access_slots[] = { + {Py_mod_exec, meth_state_access_exec}, + {0, NULL} +}; + +static PyModuleDef def_meth_state_access = { + PyModuleDef_HEAD_INIT, /* m_base */ + "_testmultiphase_meth_state_access", /* m_name */ + PyDoc_STR("Module testing access" + " to state from methods."), + sizeof(meth_state), /* m_size */ + NULL, /* m_methods */ + meth_state_access_slots, /* m_slots */ + 0, /* m_traverse */ + 0, /* m_clear */ + 0, /* m_free */ +}; + +PyMODINIT_FUNC +PyInit__testmultiphase_meth_state_access(PyObject *spec) +{ + return PyModuleDef_Init(&def_meth_state_access); +} + + /*** Helper for imp test ***/ static PyModuleDef imp_dummy_def = 
TEST_MODULE_DEF("imp_dummy", main_slots, testexport_methods); diff --git a/Modules/clinic/_testmultiphase.c.h b/Modules/clinic/_testmultiphase.c.h new file mode 100644 index 00000000000000..0d38c230f71865 --- /dev/null +++ b/Modules/clinic/_testmultiphase.c.h @@ -0,0 +1,101 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_testmultiphase_StateAccessType_get_defining_module__doc__, +"get_defining_module($self, /)\n" +"--\n" +"\n" +"Return the module of the defining class."); + +#define _TESTMULTIPHASE_STATEACCESSTYPE_GET_DEFINING_MODULE_METHODDEF \ + {"get_defining_module", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_get_defining_module, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_get_defining_module__doc__}, + +static PyObject * +_testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject *self, + PyTypeObject *cls); + +static PyObject * +_testmultiphase_StateAccessType_get_defining_module(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":get_defining_module", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _testmultiphase_StateAccessType_get_defining_module_impl(self, cls); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testmultiphase_StateAccessType_increment_count_clinic__doc__, +"increment_count_clinic($self, /, n=1, *, twice=False)\n" +"--\n" +"\n" +"Add \'n\' from the module-state counter.\n" +"\n" +"Pass \'twice\' to double that amount.\n" +"\n" +"This tests Argument Clinic support for defining_class."); + +#define _TESTMULTIPHASE_STATEACCESSTYPE_INCREMENT_COUNT_CLINIC_METHODDEF \ + {"increment_count_clinic", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_increment_count_clinic, 
METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_increment_count_clinic__doc__}, + +static PyObject * +_testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObject *self, + PyTypeObject *cls, + int n, int twice); + +static PyObject * +_testmultiphase_StateAccessType_increment_count_clinic(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"n", "twice", NULL}; + static _PyArg_Parser _parser = {"|i$p:increment_count_clinic", _keywords, 0}; + int n = 1; + int twice = 0; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &n, &twice)) { + goto exit; + } + return_value = _testmultiphase_StateAccessType_increment_count_clinic_impl(self, cls, n, twice); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testmultiphase_StateAccessType_get_count__doc__, +"get_count($self, /)\n" +"--\n" +"\n" +"Return the value of the module-state counter."); + +#define _TESTMULTIPHASE_STATEACCESSTYPE_GET_COUNT_METHODDEF \ + {"get_count", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_get_count, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_get_count__doc__}, + +static PyObject * +_testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self, + PyTypeObject *cls); + +static PyObject * +_testmultiphase_StateAccessType_get_count(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":get_count", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _testmultiphase_StateAccessType_get_count_impl(self, cls); + +exit: + return return_value; +} +/*[clinic end generated code: output=39eea487e94e7f5d 
input=a9049054013a1b77]*/ diff --git a/Objects/descrobject.c b/Objects/descrobject.c index 572baa5e312d26..c9754a11b89be1 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -127,7 +127,11 @@ classmethod_get(PyMethodDescrObject *descr, PyObject *obj, PyObject *type) ((PyTypeObject *)type)->tp_name); return NULL; } - return PyCFunction_NewEx(descr->d_method, type, NULL); + PyTypeObject *cls = NULL; + if (descr->d_method->ml_flags & METH_METHOD) { + cls = descr->d_common.d_type; + } + return PyCMethod_New(descr->d_method, type, NULL, cls); } static PyObject * @@ -137,7 +141,19 @@ method_get(PyMethodDescrObject *descr, PyObject *obj, PyObject *type) if (descr_check((PyDescrObject *)descr, obj, &res)) return res; - return PyCFunction_NewEx(descr->d_method, obj, NULL); + if (descr->d_method->ml_flags & METH_METHOD) { + if (PyType_Check(type)) { + return PyCMethod_New(descr->d_method, obj, NULL, descr->d_common.d_type); + } else { + PyErr_Format(PyExc_TypeError, + "descriptor '%V' needs a type, not '%s', as arg 2", + descr_name((PyDescrObject *)descr), + Py_TYPE(type)->tp_name); + return NULL; + } + } else { + return PyCFunction_NewEx(descr->d_method, obj, NULL); + } } static PyObject * @@ -335,6 +351,27 @@ method_vectorcall_VARARGS_KEYWORDS( return result; } +static PyObject * +method_vectorcall_FASTCALL_KEYWORDS_METHOD( + PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + PyThreadState *tstate = _PyThreadState_GET(); + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); + if (method_check_args(func, args, nargs, NULL)) { + return NULL; + } + NULL; + PyCMethod meth = (PyCMethod) method_enter_call(tstate, func); + if (meth == NULL) { + return NULL; + } + PyObject *result = meth(args[0], + ((PyMethodDescrObject *)func)->d_common.d_type, + args+1, nargs-1, kwnames); + Py_LeaveRecursiveCall(); + return result; +} + static PyObject * method_vectorcall_FASTCALL( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) @@ 
-868,7 +905,8 @@ PyDescr_NewMethod(PyTypeObject *type, PyMethodDef *method) { /* Figure out correct vectorcall function to use */ vectorcallfunc vectorcall; - switch (method->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS)) + switch (method->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | + METH_O | METH_KEYWORDS | METH_METHOD)) { case METH_VARARGS: vectorcall = method_vectorcall_VARARGS; @@ -888,6 +926,9 @@ PyDescr_NewMethod(PyTypeObject *type, PyMethodDef *method) case METH_O: vectorcall = method_vectorcall_O; break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + vectorcall = method_vectorcall_FASTCALL_KEYWORDS_METHOD; + break; default: PyErr_Format(PyExc_SystemError, "%s() method: bad call flags", method->ml_name); diff --git a/Objects/methodobject.c b/Objects/methodobject.c index 20eba6fa8643bf..5659f2143d1823 100644 --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -10,12 +10,16 @@ /* undefine macro trampoline to PyCFunction_NewEx */ #undef PyCFunction_New +/* undefine macro trampoline to PyCMethod_New */ +#undef PyCFunction_NewEx /* Forward declarations */ static PyObject * cfunction_vectorcall_FASTCALL( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * cfunction_vectorcall_FASTCALL_KEYWORDS( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * cfunction_vectorcall_FASTCALL_KEYWORDS_METHOD( + PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * cfunction_vectorcall_NOARGS( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * cfunction_vectorcall_O( @@ -32,10 +36,17 @@ PyCFunction_New(PyMethodDef *ml, PyObject *self) PyObject * PyCFunction_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module) +{ + return PyCMethod_New(ml, self, module, NULL); +} + +PyObject * +PyCMethod_New(PyMethodDef *ml, PyObject *self, PyObject *module, PyTypeObject 
*cls) { /* Figure out correct vectorcall function to use */ vectorcallfunc vectorcall; - switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS)) + switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | + METH_O | METH_KEYWORDS | METH_METHOD)) { case METH_VARARGS: case METH_VARARGS | METH_KEYWORDS: @@ -55,17 +66,44 @@ PyCFunction_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module) case METH_O: vectorcall = cfunction_vectorcall_O; break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + vectorcall = cfunction_vectorcall_FASTCALL_KEYWORDS_METHOD; + break; default: PyErr_Format(PyExc_SystemError, "%s() method: bad call flags", ml->ml_name); return NULL; } - PyCFunctionObject *op = - PyObject_GC_New(PyCFunctionObject, &PyCFunction_Type); - if (op == NULL) { - return NULL; + PyCFunctionObject *op = NULL; + + if (ml->ml_flags & METH_METHOD) { + if (!cls) { + PyErr_SetString(PyExc_SystemError, + "attempting to create PyCMethod with a METH_METHOD " + "flag but no class"); + return NULL; + } + PyCMethodObject *om = PyObject_GC_New(PyCMethodObject, &PyCMethod_Type); + if (om == NULL) { + return NULL; + } + Py_INCREF(cls); + om->mm_class = cls; + op = (PyCFunctionObject *)om; + } else { + if (cls) { + PyErr_SetString(PyExc_SystemError, + "attempting to create PyCFunction with class " + "but no METH_METHOD flag"); + return NULL; + } + op = PyObject_GC_New(PyCFunctionObject, &PyCFunction_Type); + if (op == NULL) { + return NULL; + } } + op->m_weakreflist = NULL; op->m_ml = ml; Py_XINCREF(self); @@ -107,6 +145,16 @@ PyCFunction_GetFlags(PyObject *op) return PyCFunction_GET_FLAGS(op); } +PyTypeObject * +PyCMethod_GetClass(PyObject *op) +{ + if (!PyCFunction_Check(op)) { + PyErr_BadInternalCall(); + return NULL; + } + return PyCFunction_GET_CLASS(op); +} + /* Methods (the standard built-in methods, that is) */ static void @@ -118,6 +166,7 @@ meth_dealloc(PyCFunctionObject *m) } Py_XDECREF(m->m_self); 
Py_XDECREF(m->m_module); + Py_XDECREF(PyCFunction_GET_CLASS(m)); PyObject_GC_Del(m); } @@ -196,6 +245,7 @@ meth_traverse(PyCFunctionObject *m, visitproc visit, void *arg) { Py_VISIT(m->m_self); Py_VISIT(m->m_module); + Py_VISIT(PyCFunction_GET_CLASS(m)); return 0; } @@ -314,6 +364,13 @@ PyTypeObject PyCFunction_Type = { 0, /* tp_dict */ }; +PyTypeObject PyCMethod_Type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + .tp_name = "builtin_method", + .tp_basicsize = sizeof(PyCMethodObject), + .tp_base = &PyCFunction_Type, +}; + /* Vectorcall functions for each of the PyCFunction calling conventions, * except for METH_VARARGS (possibly combined with METH_KEYWORDS) which * doesn't use vectorcall. @@ -385,6 +442,22 @@ cfunction_vectorcall_FASTCALL_KEYWORDS( return result; } +static PyObject * +cfunction_vectorcall_FASTCALL_KEYWORDS_METHOD( + PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + PyThreadState *tstate = _PyThreadState_GET(); + PyTypeObject *cls = PyCFunction_GET_CLASS(func); + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); + PyCMethod meth = (PyCMethod)cfunction_enter_call(tstate, func); + if (meth == NULL) { + return NULL; + } + PyObject *result = meth(PyCFunction_GET_SELF(func), cls, args, nargs, kwnames); + _Py_LeaveRecursiveCall(tstate); + return result; +} + static PyObject * cfunction_vectorcall_NOARGS( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) diff --git a/Objects/object.c b/Objects/object.c index 75ea92ad9005c9..623ee52eb1e22d 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -1789,6 +1789,7 @@ _PyTypes_Init(void) INIT_TYPE(&PyCode_Type, "code"); INIT_TYPE(&PyFrame_Type, "frame"); INIT_TYPE(&PyCFunction_Type, "builtin function"); + INIT_TYPE(&PyCMethod_Type, "builtin method"); INIT_TYPE(&PyMethod_Type, "method"); INIT_TYPE(&PyFunction_Type, "function"); INIT_TYPE(&PyDictProxy_Type, "dict proxy"); diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 0d5600b4ce4faf..525f5ac5d5775a 
100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2708,6 +2708,9 @@ type_new(PyTypeObject *metatype, PyObject *args, PyObject *kwds) if (qualname != NULL && _PyDict_DelItemId(dict, &PyId___qualname__) < 0) goto error; + /* Set ht_module */ + et->ht_module = NULL; + /* Set tp_doc to a copy of dict['__doc__'], if the latter is there and is a string. The __doc__ accessor will first look for tp_doc; if that fails, it will still look into __dict__. @@ -2939,6 +2942,12 @@ PyType_FromSpec_tp_traverse(PyObject *self, visitproc visit, void *arg) PyObject * PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) +{ + return PyType_FromModuleAndSpec(NULL, spec, bases); +} + +PyObject * +PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { PyHeapTypeObject *res; PyObject *modname; @@ -2998,6 +3007,9 @@ PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) Py_INCREF(res->ht_qualname); type->tp_name = spec->name; + Py_XINCREF(module); + res->ht_module = module; + /* Adjust for empty tuple bases */ if (!bases) { base = &PyBaseObject_Type; @@ -3176,6 +3188,40 @@ PyType_GetSlot(PyTypeObject *type, int slot) return *(void**)(((char*)type) + slotoffsets[slot]); } +PyObject * +PyType_GetModule(PyTypeObject *type) +{ + assert(PyType_Check(type)); + if (!_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) { + PyErr_Format( + PyExc_TypeError, + "PyType_GetModule: Type '%s' is not a heap type", + type->tp_name); + return NULL; + } + + PyHeapTypeObject* et = (PyHeapTypeObject*)type; + if (!et->ht_module) { + PyErr_Format( + PyExc_TypeError, + "PyType_GetModule: Type '%s' has no associated module", + type->tp_name); + return NULL; + } + return et->ht_module; + +} + +void * +PyType_GetModuleState(PyTypeObject *type) +{ + PyObject *m = PyType_GetModule(type); + if (m == NULL) { + return NULL; + } + return PyModule_GetState(m); +} + /* Internal API to look for a name through the MRO, bypassing the method cache. 
This returns a borrowed reference, and might set an exception. 'error' is set to: -1: error with exception; 1: error without exception; 0: ok */ @@ -3503,8 +3549,10 @@ type_dealloc(PyTypeObject *type) Py_XDECREF(et->ht_name); Py_XDECREF(et->ht_qualname); Py_XDECREF(et->ht_slots); - if (et->ht_cached_keys) + if (et->ht_cached_keys) { _PyDictKeys_DecRef(et->ht_cached_keys); + } + Py_XDECREF(et->ht_module); Py_TYPE(type)->tp_free((PyObject *)type); } @@ -3694,6 +3742,7 @@ type_traverse(PyTypeObject *type, visitproc visit, void *arg) Py_VISIT(type->tp_mro); Py_VISIT(type->tp_bases); Py_VISIT(type->tp_base); + Py_VISIT(((PyHeapTypeObject *)type)->ht_module); /* There's no need to visit type->tp_subclasses or ((PyHeapTypeObject *)type)->ht_slots, because they can't be involved @@ -3715,10 +3764,13 @@ type_clear(PyTypeObject *type) the dict, so that other objects caught in a reference cycle don't start calling destroyed methods. - Otherwise, the only field we need to clear is tp_mro, which is + Otherwise, the we need to clear tp_mro, which is part of a hard cycle (its first element is the class itself) that won't be broken otherwise (it's a tuple and tuples don't have a - tp_clear handler). None of the other fields need to be + tp_clear handler). + We also need to clear ht_module, if present: the module usually holds a + reference to its class. 
None of the other fields need to be + cleared, and here's why: tp_cache: @@ -3743,8 +3795,11 @@ type_clear(PyTypeObject *type) ((PyHeapTypeObject *)type)->ht_cached_keys = NULL; _PyDictKeys_DecRef(cached_keys); } - if (type->tp_dict) + if (type->tp_dict) { PyDict_Clear(type->tp_dict); + } + Py_CLEAR(((PyHeapTypeObject *)type)->ht_module); + Py_CLEAR(type->tp_mro); return 0; diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 21b51bf5e6ddcf..73274ac9acf557 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -138,6 +138,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index f5c76fa34eb946..254c8fbbea5fb8 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -111,6 +111,9 @@ Include + + Include + Include diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index 382e29a28ab48e..281a749a935cc4 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -657,9 +657,14 @@ def output_templates(self, f): if not p.is_optional(): min_pos = i + requires_defining_class = any( + isinstance(p.converter, defining_class_converter) + for p in parameters) + meth_o = (len(parameters) == 1 and parameters[0].is_positional_only() and not converters[0].is_optional() and + not requires_defining_class and not new_or_init) # we have to set these things before we're done: @@ -717,6 +722,11 @@ def output_templates(self, f): {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) """) + parser_prototype_def_class = normalize_snippet(""" + static PyObject * + {c_basename}({self_type}{self_name}, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) + """) + # parser_body_fields remembers the fields passed in to the # previous call to parser_body. this is used for an awful hack. 
parser_body_fields = () @@ -824,7 +834,7 @@ def parser_body(prototype, *fields, declarations=''): parser_definition = parser_body(parser_prototype, ' {option_group_parsing}') - elif pos_only == len(parameters): + elif not requires_defining_class and pos_only == len(parameters): if not new_or_init: # positional-only, but no option groups # we only need one call to _PyArg_ParseStack @@ -891,7 +901,7 @@ def parser_body(prototype, *fields, declarations=''): parser_prototype = parser_prototype_fastcall_keywords argname_fmt = 'args[%d]' declarations = normalize_snippet(""" - static const char * const _keywords[] = {{{keywords}, NULL}}; + static const char * const _keywords[] = {{{keywords} NULL}}; static _PyArg_Parser _parser = {{NULL, _keywords, "{name}", 0}}; PyObject *argsbuf[%s]; """ % len(converters)) @@ -909,7 +919,7 @@ def parser_body(prototype, *fields, declarations=''): parser_prototype = parser_prototype_keyword argname_fmt = 'fastargs[%d]' declarations = normalize_snippet(""" - static const char * const _keywords[] = {{{keywords}, NULL}}; + static const char * const _keywords[] = {{{keywords} NULL}}; static _PyArg_Parser _parser = {{NULL, _keywords, "{name}", 0}}; PyObject *argsbuf[%s]; PyObject * const *fastargs; @@ -923,6 +933,9 @@ def parser_body(prototype, *fields, declarations=''): goto exit; }} """ % (min_pos, max_pos, min_kw_only), indent=4)] + if requires_defining_class: + flags = 'METH_METHOD|' + flags + parser_prototype = parser_prototype_def_class add_label = None for i, p in enumerate(parameters): @@ -983,11 +996,11 @@ def parser_body(prototype, *fields, declarations=''): parser_code.append("%s:" % add_label) else: declarations = ( - 'static const char * const _keywords[] = {{{keywords}, NULL}};\n' + 'static const char * const _keywords[] = {{{keywords} NULL}};\n' 'static _PyArg_Parser _parser = {{"{format_units}:{name}", _keywords, 0}};') if not new_or_init: parser_code = [normalize_snippet(""" - if (!_PyArg_ParseStackAndKeywords(args, nargs, 
kwnames, &_parser, + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser{parse_arguments_comma} {parse_arguments})) {{ goto exit; }} @@ -1021,6 +1034,9 @@ def parser_body(prototype, *fields, declarations=''): if parses_keywords: assert parses_positional + if requires_defining_class: + raise ValueError("Slot methods cannot access their defining class.") + if not parses_keywords: fields.insert(0, normalize_snippet(""" if ({self_type_check}!_PyArg_NoKeywords("{name}", kwargs)) {{ @@ -1297,9 +1313,13 @@ def render_function(self, clinic, f): template_dict['declarations'] = format_escape("\n".join(data.declarations)) template_dict['initializers'] = "\n\n".join(data.initializers) template_dict['modifications'] = '\n\n'.join(data.modifications) - template_dict['keywords'] = '"' + '", "'.join(data.keywords) + '"' + template_dict['keywords'] = ' '.join('"' + k + '",' for k in data.keywords) template_dict['format_units'] = ''.join(data.format_units) template_dict['parse_arguments'] = ', '.join(data.parse_arguments) + if data.parse_arguments: + template_dict['parse_arguments_comma'] = ','; + else: + template_dict['parse_arguments_comma'] = ''; template_dict['impl_parameters'] = ", ".join(data.impl_parameters) template_dict['impl_arguments'] = ", ".join(data.impl_arguments) template_dict['return_conversion'] = format_escape("".join(data.return_conversion).rstrip()) @@ -2730,6 +2750,25 @@ def parse_arg(self, argname, displayname): """.format(argname=argname, paramname=self.name) return super().parse_arg(argname, displayname) +class defining_class_converter(CConverter): + """ + A special-case converter: + this is the default converter used for the defining class. 
+ """ + type = 'PyTypeObject *' + format_unit = '' + show_in_signature = False + + def converter_init(self, *, type=None): + self.specified_type = type + + def render(self, parameter, data): + self._render_self(parameter, data) + + def set_template_dict(self, template_dict): + template_dict['defining_class_name'] = self.name + + class char_converter(CConverter): type = 'char' default_type = (bytes, bytearray) @@ -4508,6 +4547,19 @@ def bad_node(self, node): else: fail("A 'self' parameter, if specified, must be the very first thing in the parameter block.") + if isinstance(converter, defining_class_converter): + _lp = len(self.function.parameters) + if _lp == 1: + if (self.parameter_state != self.ps_required): + fail("A 'defining_class' parameter cannot be marked optional.") + if value is not unspecified: + fail("A 'defining_class' parameter cannot have a default value.") + if self.group: + fail("A 'defining_class' parameter cannot be in an optional group.") + else: + fail("A 'defining_class' parameter, if specified, must either be the first thing in the parameter block, or come just after 'self'.") + + p = Parameter(parameter_name, kind, function=self.function, converter=converter, default=value, group=self.group) if parameter_name in self.function.parameters: From 8963a7f1f84a05412178b56629508b660d38861b Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 7 May 2020 15:42:33 +0200 Subject: [PATCH 037/115] bpo-40545: Export _PyErr_GetTopmostException() function (GH-19978) Declare _PyErr_GetTopmostException() with PyAPI_FUNC() to properly export the function in the C API. The function remains private ("_Py") prefix. 
Co-Authored-By: Julien Danjou --- Include/cpython/pyerrors.h | 2 +- .../NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index cdd052026c1ba2..dd3c2caa0cc043 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -75,7 +75,7 @@ typedef PyOSErrorObject PyWindowsErrorObject; /* Error handling definitions */ PyAPI_FUNC(void) _PyErr_SetKeyError(PyObject *); -_PyErr_StackItem *_PyErr_GetTopmostException(PyThreadState *tstate); +PyAPI_FUNC(_PyErr_StackItem*) _PyErr_GetTopmostException(PyThreadState *tstate); PyAPI_FUNC(void) _PyErr_GetExcInfo(PyThreadState *, PyObject **, PyObject **, PyObject **); /* Context manipulation (PEP 3134) */ diff --git a/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst b/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst new file mode 100644 index 00000000000000..d7f256a2a6b527 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst @@ -0,0 +1,3 @@ +Declare ``_PyErr_GetTopmostException()`` with ``PyAPI_FUNC()`` to properly +export the function in the C API. The function remains private (``_Py``) +prefix. From a1d9e0accd33af1d8e90fc48b34c13d7b07dcf57 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 7 May 2020 08:56:01 -0600 Subject: [PATCH 038/115] bpo-32604: [_xxsubinterpreters] Propagate exceptions. (GH-19768) (Note: PEP 554 is not accepted and the implementation in the code base is a private one for use in the test suite.) If code running in a subinterpreter raises an uncaught exception then the "run" call in the calling interpreter fails. A RunFailedError is raised there that summarizes the original exception as a string. The actual exception type, __cause__, __context__, state, etc. are all discarded. 
This turned out to be functionally insufficient in practice. There is a more helpful solution (and PEP 554 has been updated appropriately). This change adds the exception propagation behavior described in PEP 554 to the _xxsubinterpreters module. With this change a copy of the original exception is set to __cause__ on the RunFailedError. For now we are using "pickle", which preserves the exception's state. We also preserve the original __cause__, __context__, and __traceback__ (since "pickle" does not preserve those). https://bugs.python.org/issue32604 --- Lib/test/test__xxsubinterpreters.py | 301 ++++++- Modules/_xxsubinterpretersmodule.c | 1139 ++++++++++++++++++++++++--- 2 files changed, 1317 insertions(+), 123 deletions(-) diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index e17bfde2c2f75a..039c040ad39508 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -1,3 +1,4 @@ +import builtins from collections import namedtuple import contextlib import itertools @@ -866,10 +867,11 @@ def assert_run_failed(self, exctype, msg=None): yield if msg is None: self.assertEqual(str(caught.exception).split(':')[0], - str(exctype)) + exctype.__name__) else: self.assertEqual(str(caught.exception), - "{}: {}".format(exctype, msg)) + "{}: {}".format(exctype.__name__, msg)) + self.assertIsInstance(caught.exception.__cause__, exctype) def test_invalid_syntax(self): with self.assert_run_failed(SyntaxError): @@ -1060,6 +1062,301 @@ def f(): self.assertEqual(retcode, 0) +def build_exception(exctype, /, *args, **kwargs): + # XXX Use __qualname__? 
+ name = exctype.__name__ + argreprs = [repr(a) for a in args] + if kwargs: + kwargreprs = [f'{k}={v!r}' for k, v in kwargs.items()] + script = f'{name}({", ".join(argreprs)}, {", ".join(kwargreprs)})' + else: + script = f'{name}({", ".join(argreprs)})' + expected = exctype(*args, **kwargs) + return script, expected + + +def build_exceptions(self, *exctypes, default=None, custom=None, bases=True): + if not exctypes: + raise NotImplementedError + if not default: + default = ((), {}) + elif isinstance(default, str): + default = ((default,), {}) + elif type(default) is not tuple: + raise NotImplementedError + elif len(default) != 2: + default = (default, {}) + elif type(default[0]) is not tuple: + default = (default, {}) + elif type(default[1]) is not dict: + default = (default, {}) + # else leave it alone + + for exctype in exctypes: + customtype = None + values = default + if custom: + if exctype in custom: + customtype = exctype + elif bases: + for customtype in custom: + if issubclass(exctype, customtype): + break + else: + customtype = None + if customtype is not None: + values = custom[customtype] + if values is None: + continue + args, kwargs = values + script, expected = build_exception(exctype, *args, **kwargs) + yield exctype, customtype, script, expected + + +try: + raise Exception +except Exception as exc: + assert exc.__traceback__ is not None + Traceback = type(exc.__traceback__) + + +class RunFailedTests(TestBase): + + BUILTINS = [v + for v in vars(builtins).values() + if (type(v) is type + and issubclass(v, Exception) + #and issubclass(v, BaseException) + ) + ] + BUILTINS_SPECIAL = [ + # These all have extra attributes (i.e. args/kwargs) + SyntaxError, + ImportError, + UnicodeError, + OSError, + SystemExit, + StopIteration, + ] + + @classmethod + def build_exceptions(cls, exctypes=None, default=(), custom=None): + if exctypes is None: + exctypes = cls.BUILTINS + if custom is None: + # Skip the "special" ones. 
+ custom = {et: None for et in cls.BUILTINS_SPECIAL} + yield from build_exceptions(*exctypes, default=default, custom=custom) + + def assertExceptionsEqual(self, exc, expected, *, chained=True): + if type(expected) is type: + self.assertIs(type(exc), expected) + return + elif not isinstance(exc, Exception): + self.assertEqual(exc, expected) + elif not isinstance(expected, Exception): + self.assertEqual(exc, expected) + else: + # Plain equality doesn't work, so we have to compare manually. + self.assertIs(type(exc), type(expected)) + self.assertEqual(exc.args, expected.args) + self.assertEqual(exc.__reduce__(), expected.__reduce__()) + if chained: + self.assertExceptionsEqual(exc.__context__, + expected.__context__) + self.assertExceptionsEqual(exc.__cause__, + expected.__cause__) + self.assertEqual(exc.__suppress_context__, + expected.__suppress_context__) + + def assertTracebacksEqual(self, tb, expected): + if not isinstance(tb, Traceback): + self.assertEqual(tb, expected) + elif not isinstance(expected, Traceback): + self.assertEqual(tb, expected) + else: + self.assertEqual(tb.tb_frame.f_code.co_name, + expected.tb_frame.f_code.co_name) + self.assertEqual(tb.tb_frame.f_code.co_filename, + expected.tb_frame.f_code.co_filename) + self.assertEqual(tb.tb_lineno, expected.tb_lineno) + self.assertTracebacksEqual(tb.tb_next, expected.tb_next) + + # XXX Move this to TestBase? 
+ @contextlib.contextmanager + def expected_run_failure(self, expected): + exctype = expected if type(expected) is type else type(expected) + + with self.assertRaises(interpreters.RunFailedError) as caught: + yield caught + exc = caught.exception + + modname = exctype.__module__ + if modname == 'builtins' or modname == '__main__': + exctypename = exctype.__name__ + else: + exctypename = f'{modname}.{exctype.__name__}' + if exctype is expected: + self.assertEqual(str(exc).split(':')[0], exctypename) + else: + self.assertEqual(str(exc), f'{exctypename}: {expected}') + self.assertExceptionsEqual(exc.__cause__, expected) + if exc.__cause__ is not None: + self.assertIsNotNone(exc.__cause__.__traceback__) + + def test_builtin_exceptions(self): + interpid = interpreters.create() + msg = '' + for i, info in enumerate(self.build_exceptions( + default=msg, + custom={ + SyntaxError: ((msg, '', 1, 3, 'a +?'), {}), + ImportError: ((msg,), {'name': 'spam', 'path': '/x/spam.py'}), + UnicodeError: None, + #UnicodeError: ((), {}), + #OSError: ((), {}), + SystemExit: ((1,), {}), + StopIteration: (('',), {}), + }, + )): + exctype, _, script, expected = info + testname = f'{i+1} - {script}' + script = f'raise {script}' + + with self.subTest(testname): + with self.expected_run_failure(expected): + interpreters.run_string(interpid, script) + + def test_custom_exception_from___main__(self): + script = dedent(""" + class SpamError(Exception): + def __init__(self, q): + super().__init__(f'got {q}') + self.q = q + raise SpamError('eggs') + """) + expected = Exception(f'SpamError: got {"eggs"}') + + interpid = interpreters.create() + with self.assertRaises(interpreters.RunFailedError) as caught: + interpreters.run_string(interpid, script) + cause = caught.exception.__cause__ + + self.assertExceptionsEqual(cause, expected) + + class SpamError(Exception): + # The normal Exception.__reduce__() produces a funny result + # here. So we have to use a custom __new__(). 
+ def __new__(cls, q): + if type(q) is SpamError: + return q + return super().__new__(cls, q) + def __init__(self, q): + super().__init__(f'got {q}') + self.q = q + + def test_custom_exception(self): + script = dedent(""" + import test.test__xxsubinterpreters + SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamError + raise SpamError('eggs') + """) + try: + ns = {} + exec(script, ns, ns) + except Exception as exc: + expected = exc + + interpid = interpreters.create() + with self.expected_run_failure(expected): + interpreters.run_string(interpid, script) + + class SpamReducedError(Exception): + def __init__(self, q): + super().__init__(f'got {q}') + self.q = q + def __reduce__(self): + return (type(self), (self.q,), {}) + + def test_custom___reduce__(self): + script = dedent(""" + import test.test__xxsubinterpreters + SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamReducedError + raise SpamError('eggs') + """) + try: + exec(script, (ns := {'__name__': '__main__'}), ns) + except Exception as exc: + expected = exc + + interpid = interpreters.create() + with self.expected_run_failure(expected): + interpreters.run_string(interpid, script) + + def test_traceback_propagated(self): + script = dedent(""" + def do_spam(): + raise Exception('uh-oh') + def do_eggs(): + return do_spam() + class Spam: + def do(self): + return do_eggs() + def get_handler(): + def handler(): + return Spam().do() + return handler + go = (lambda: get_handler()()) + def iter_all(): + yield from (go() for _ in [True]) + yield None + def main(): + for v in iter_all(): + pass + main() + """) + try: + ns = {} + exec(script, ns, ns) + except Exception as exc: + expected = exc + expectedtb = exc.__traceback__.tb_next + + interpid = interpreters.create() + with self.expected_run_failure(expected) as caught: + interpreters.run_string(interpid, script) + exc = caught.exception + + self.assertTracebacksEqual(exc.__cause__.__traceback__, + expectedtb) + + def test_chained_exceptions(self): 
+ script = dedent(""" + try: + raise ValueError('msg 1') + except Exception as exc1: + try: + raise TypeError('msg 2') + except Exception as exc2: + try: + raise IndexError('msg 3') from exc2 + except Exception: + raise AttributeError('msg 4') + """) + try: + exec(script, {}, {}) + except Exception as exc: + expected = exc + + interpid = interpreters.create() + with self.expected_run_failure(expected) as caught: + interpreters.run_string(interpid, script) + exc = caught.exception + + # ...just to be sure. + self.assertIs(type(exc.__cause__), AttributeError) + + ################################## # channel tests diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 8a6fce9e0b4bd9..9c5df16e156a1d 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1,5 +1,4 @@ - -/* interpreters module */ +/* _interpreters module */ /* low-level access to interpreter primitives */ #include "Python.h" @@ -7,35 +6,921 @@ #include "interpreteridobject.h" +// XXX Emit a warning? +#define IGNORE_FAILURE(msg) \ + fprintf(stderr, " -----\nRunFailedError: %s\n", msg); \ + PyErr_PrintEx(0); \ + fprintf(stderr, " -----\n"); \ + PyErr_Clear(); + +typedef void (*_deallocfunc)(void *); + +static PyInterpreterState * +_get_current(void) +{ + // _PyInterpreterState_Get() aborts if lookup fails, so don't need + // to check the result for NULL. + return _PyInterpreterState_Get(); +} + + +/* string utils *************************************************************/ + +// PyMem_Free() must be used to dealocate the resulting string. 
static char * -_copy_raw_string(PyObject *strobj) +_strdup_and_size(const char *data, Py_ssize_t *psize, _deallocfunc *dealloc) { - const char *str = PyUnicode_AsUTF8(strobj); - if (str == NULL) { - return NULL; + if (data == NULL) { + if (psize != NULL) { + *psize = 0; + } + if (dealloc != NULL) { + *dealloc = NULL; + } + return ""; + } + + Py_ssize_t size; + if (psize == NULL) { + size = strlen(data); + } else { + size = *psize; + if (size == 0) { + size = strlen(data); + *psize = size; // The size "return" value. + } } - char *copied = PyMem_Malloc(strlen(str)+1); + char *copied = PyMem_Malloc(size+1); if (copied == NULL) { PyErr_NoMemory(); return NULL; } - strcpy(copied, str); + if (dealloc != NULL) { + *dealloc = PyMem_Free; + } + memcpy(copied, data, size+1); return copied; } -static PyInterpreterState * -_get_current(void) +static const char * +_pyobj_get_str_and_size(PyObject *obj, Py_ssize_t *psize) { - // PyInterpreterState_Get() aborts if lookup fails, so don't need - // to check the result for NULL. 
- return PyInterpreterState_Get(); + if (PyUnicode_Check(obj)) { + return PyUnicode_AsUTF8AndSize(obj, psize); + } else { + const char *data = NULL; + PyBytes_AsStringAndSize(obj, (char **)&data, psize); + return data; + } +} + +/* "raw" strings */ + +typedef struct _rawstring { + Py_ssize_t size; + const char *data; + _deallocfunc dealloc; +} _rawstring; + +static void +_rawstring_init(_rawstring *raw) +{ + raw->size = 0; + raw->data = NULL; + raw->dealloc = NULL; +} + +static _rawstring * +_rawstring_new(void) +{ + _rawstring *raw = PyMem_NEW(_rawstring, 1); + if (raw == NULL) { + PyErr_NoMemory(); + return NULL; + } + _rawstring_init(raw); + return raw; +} + +static void +_rawstring_clear(_rawstring *raw) +{ + if (raw->data != NULL && raw->dealloc != NULL) { + (*raw->dealloc)((void *)raw->data); + } + _rawstring_init(raw); +} + +static void +_rawstring_free(_rawstring *raw) +{ + _rawstring_clear(raw); + PyMem_Free(raw); +} + +static int +_rawstring_is_clear(_rawstring *raw) +{ + return raw->size == 0 && raw->data == NULL && raw->dealloc == NULL; +} + +//static void +//_rawstring_move(_rawstring *raw, _rawstring *src) +//{ +// raw->size = src->size; +// raw->data = src->data; +// raw->dealloc = src->dealloc; +// _rawstring_init(src); +//} + +static void +_rawstring_proxy(_rawstring *raw, const char *str) +{ + if (str == NULL) { + str = ""; + } + raw->size = strlen(str); + raw->data = str; + raw->dealloc = NULL; +} + +static int +_rawstring_buffer(_rawstring *raw, Py_ssize_t size) +{ + raw->data = PyMem_Malloc(size+1); + if (raw->data == NULL) { + PyErr_NoMemory(); + return -1; + } + raw->size = size; + raw->dealloc = PyMem_Free; + return 0; +} + +static int +_rawstring_strcpy(_rawstring *raw, const char *str, Py_ssize_t size) +{ + _deallocfunc dealloc = NULL; + const char *copied = _strdup_and_size(str, &size, &dealloc); + if (copied == NULL) { + return -1; + } + + raw->size = size; + raw->dealloc = dealloc; + raw->data = copied; + return 0; +} + +static int 
+_rawstring_from_pyobj(_rawstring *raw, PyObject *obj) +{ + Py_ssize_t size = 0; + const char *data = _pyobj_get_str_and_size(obj, &size); + if (PyErr_Occurred()) { + return -1; + } + if (_rawstring_strcpy(raw, data, size) != 0) { + return -1; + } + return 0; +} + +static int +_rawstring_from_pyobj_attr(_rawstring *raw, PyObject *obj, const char *attr) +{ + int res = -1; + PyObject *valueobj = PyObject_GetAttrString(obj, attr); + if (valueobj == NULL) { + goto done; + } + if (!PyUnicode_Check(valueobj)) { + // XXX PyObject_Str()? Repr()? + goto done; + } + const char *valuestr = PyUnicode_AsUTF8(valueobj); + if (valuestr == NULL) { + if (PyErr_Occurred()) { + goto done; + } + } else if (_rawstring_strcpy(raw, valuestr, 0) != 0) { + _rawstring_clear(raw); + goto done; + } + res = 0; + +done: + Py_XDECREF(valueobj); + return res; +} + +static PyObject * +_rawstring_as_pybytes(_rawstring *raw) +{ + return PyBytes_FromStringAndSize(raw->data, raw->size); +} + + +/* object utils *************************************************************/ + +static void +_pyobj_identify_type(PyObject *obj, _rawstring *modname, _rawstring *clsname) +{ + PyObject *objtype = (PyObject *)Py_TYPE(obj); + + // Try __module__ and __name__. + if (_rawstring_from_pyobj_attr(modname, objtype, "__module__") != 0) { + // Fall back to the previous values in "modname". + IGNORE_FAILURE("bad __module__"); + } + if (_rawstring_from_pyobj_attr(clsname, objtype, "__name__") != 0) { + // Fall back to the previous values in "clsname". + IGNORE_FAILURE("bad __name__"); + } + + // XXX Fall back to __qualname__? + // XXX Fall back to tp_name? 
+} + +static PyObject * +_pyobj_get_class(const char *modname, const char *clsname) +{ + assert(clsname != NULL); + if (modname == NULL) { + modname = "builtins"; + } + + PyObject *module = PyImport_ImportModule(modname); + if (module == NULL) { + return NULL; + } + PyObject *cls = PyObject_GetAttrString(module, clsname); + Py_DECREF(module); + return cls; +} + +static PyObject * +_pyobj_create(const char *modname, const char *clsname, PyObject *arg) +{ + PyObject *cls = _pyobj_get_class(modname, clsname); + if (cls == NULL) { + return NULL; + } + PyObject *obj = NULL; + if (arg == NULL) { + obj = _PyObject_CallNoArg(cls); + } else { + obj = PyObject_CallFunction(cls, "O", arg); + } + Py_DECREF(cls); + return obj; +} + + +/* object snapshots */ + +typedef struct _objsnapshot { + // If modname is NULL then try "builtins" and "__main__". + _rawstring modname; + // clsname is required. + _rawstring clsname; + + // The rest are optional. + + // The serialized exception. + _rawstring *serialized; +} _objsnapshot; + +static void +_objsnapshot_init(_objsnapshot *osn) +{ + _rawstring_init(&osn->modname); + _rawstring_init(&osn->clsname); + osn->serialized = NULL; +} + +//static _objsnapshot * +//_objsnapshot_new(void) +//{ +// _objsnapshot *osn = PyMem_NEW(_objsnapshot, 1); +// if (osn == NULL) { +// PyErr_NoMemory(); +// return NULL; +// } +// _objsnapshot_init(osn); +// return osn; +//} + +static void +_objsnapshot_clear(_objsnapshot *osn) +{ + _rawstring_clear(&osn->modname); + _rawstring_clear(&osn->clsname); + if (osn->serialized != NULL) { + _rawstring_free(osn->serialized); + osn->serialized = NULL; + } +} + +//static void +//_objsnapshot_free(_objsnapshot *osn) +//{ +// _objsnapshot_clear(osn); +// PyMem_Free(osn); +//} + +static int +_objsnapshot_is_clear(_objsnapshot *osn) +{ + return osn->serialized == NULL + && _rawstring_is_clear(&osn->modname) + && _rawstring_is_clear(&osn->clsname); +} + +static void +_objsnapshot_summarize(_objsnapshot *osn, _rawstring 
*rawbuf, const char *msg) +{ + if (msg == NULL || *msg == '\0') { + // XXX Keep it NULL? + // XXX Keep it an empty string? + // XXX Use something more informative? + msg = ""; + } + const char *clsname = osn->clsname.data; + const char *modname = osn->modname.data; + if (modname && *modname == '\0') { + modname = NULL; + } + + // Prep the buffer. + Py_ssize_t size = strlen(clsname); + if (modname != NULL) { + if (strcmp(modname, "builtins") == 0) { + modname = NULL; + } else if (strcmp(modname, "__main__") == 0) { + modname = NULL; + } else { + size += strlen(modname) + 1; + } + } + if (msg != NULL) { + size += strlen(": ") + strlen(msg); + } + if (modname != NULL || msg != NULL) { + if (_rawstring_buffer(rawbuf, size) != 0) { + IGNORE_FAILURE("could not summarize object snapshot"); + return; + } + } + // ...else we'll proxy clsname as-is, so no need to allocate a buffer. + + // XXX Use __qualname__ somehow? + char *buf = (char *)rawbuf->data; + if (modname != NULL) { + if (msg != NULL) { + snprintf(buf, size+1, "%s.%s: %s", modname, clsname, msg); + } else { + snprintf(buf, size+1, "%s.%s", modname, clsname); + } + } else if (msg != NULL) { + snprintf(buf, size+1, "%s: %s", clsname, msg); + } else { + _rawstring_proxy(rawbuf, clsname); + } +} + +static _rawstring * +_objsnapshot_get_minimal_summary(_objsnapshot *osn, PyObject *obj) +{ + const char *str = NULL; + PyObject *objstr = PyObject_Str(obj); + if (objstr == NULL) { + PyErr_Clear(); + } else { + str = PyUnicode_AsUTF8(objstr); + if (str == NULL) { + PyErr_Clear(); + } + } + + _rawstring *summary = _rawstring_new(); + if (summary == NULL) { + return NULL; + } + _objsnapshot_summarize(osn, summary, str); + return summary; +} + +static void +_objsnapshot_extract(_objsnapshot *osn, PyObject *obj) +{ + assert(_objsnapshot_is_clear(osn)); + + // Get the "qualname". 
+ _rawstring_proxy(&osn->modname, ""); + _rawstring_proxy(&osn->clsname, ""); + _pyobj_identify_type(obj, &osn->modname, &osn->clsname); + + // Serialize the object. + // XXX Use marshal? + PyObject *pickle = PyImport_ImportModule("pickle"); + if (pickle == NULL) { + IGNORE_FAILURE("could not serialize object: pickle import failed"); + return; + } + PyObject *objdata = PyObject_CallMethod(pickle, "dumps", "(O)", obj); + Py_DECREF(pickle); + if (objdata == NULL) { + IGNORE_FAILURE("could not serialize object: pickle.dumps failed"); + } else { + _rawstring *serialized = _rawstring_new(); + int res = _rawstring_from_pyobj(serialized, objdata); + Py_DECREF(objdata); + if (res != 0) { + IGNORE_FAILURE("could not serialize object: raw str failed"); + _rawstring_free(serialized); + } else if (serialized->size == 0) { + _rawstring_free(serialized); + } else { + osn->serialized = serialized; + } + } +} + +static PyObject * +_objsnapshot_resolve_serialized(_objsnapshot *osn) +{ + assert(osn->serialized != NULL); + + // XXX Use marshal? + PyObject *pickle = PyImport_ImportModule("pickle"); + if (pickle == NULL) { + return NULL; + } + PyObject *objdata = _rawstring_as_pybytes(osn->serialized); + if (objdata == NULL) { + return NULL; + } else { + PyObject *obj = PyObject_CallMethod(pickle, "loads", "O", objdata); + Py_DECREF(objdata); + return obj; + } +} + +static PyObject * +_objsnapshot_resolve_naive(_objsnapshot *osn, PyObject *arg) +{ + if (_rawstring_is_clear(&osn->clsname)) { + // We can't proceed without at least the class name. 
+ PyErr_SetString(PyExc_ValueError, "missing class name"); + return NULL; + } + + if (osn->modname.data != NULL) { + return _pyobj_create(osn->modname.data, osn->clsname.data, arg); + } else { + PyObject *obj = _pyobj_create("builtins", osn->clsname.data, arg); + if (obj == NULL) { + PyErr_Clear(); + obj = _pyobj_create("__main__", osn->clsname.data, arg); + } + return obj; + } +} + +static PyObject * +_objsnapshot_resolve(_objsnapshot *osn) +{ + if (osn->serialized != NULL) { + PyObject *obj = _objsnapshot_resolve_serialized(osn); + if (obj != NULL) { + return obj; + } + IGNORE_FAILURE("could not de-serialize object"); + } + + // Fall back to naive resolution. + return _objsnapshot_resolve_naive(osn, NULL); +} + + +/* exception utils **********************************************************/ + +// _pyexc_create is inspired by _PyErr_SetObject(). + +static PyObject * +_pyexc_create(PyObject *exctype, const char *msg, PyObject *tb) +{ + assert(exctype != NULL && PyExceptionClass_Check(exctype)); + + PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL; + PyErr_Fetch(&curtype, &curexc, &curtb); + + // Create the object. + PyObject *exc = NULL; + if (msg != NULL) { + PyObject *msgobj = PyUnicode_FromString(msg); + if (msgobj == NULL) { + IGNORE_FAILURE("could not deserialize propagated error message"); + } + exc = _PyObject_CallOneArg(exctype, msgobj); + Py_XDECREF(msgobj); + } else { + exc = _PyObject_CallNoArg(exctype); + } + if (exc == NULL) { + return NULL; + } + + // Set the traceback, if any. + if (tb == NULL) { + tb = curtb; + } + if (tb != NULL) { + // This does *not* steal a reference! 
+ PyException_SetTraceback(exc, tb); + } + + PyErr_Restore(curtype, curexc, curtb); + + return exc; +} + +/* traceback snapshots */ + +typedef struct _tbsnapshot { + _rawstring tbs_funcname; + _rawstring tbs_filename; + int tbs_lineno; + struct _tbsnapshot *tbs_next; +} _tbsnapshot; + +static void +_tbsnapshot_init(_tbsnapshot *tbs) +{ + _rawstring_init(&tbs->tbs_funcname); + _rawstring_init(&tbs->tbs_filename); + tbs->tbs_lineno = -1; + tbs->tbs_next = NULL; +} + +static _tbsnapshot * +_tbsnapshot_new(void) +{ + _tbsnapshot *tbs = PyMem_NEW(_tbsnapshot, 1); + if (tbs == NULL) { + PyErr_NoMemory(); + return NULL; + } + _tbsnapshot_init(tbs); + return tbs; +} + +static void _tbsnapshot_free(_tbsnapshot *); // forward + +static void +_tbsnapshot_clear(_tbsnapshot *tbs) +{ + _rawstring_clear(&tbs->tbs_funcname); + _rawstring_clear(&tbs->tbs_filename); + tbs->tbs_lineno = -1; + if (tbs->tbs_next != NULL) { + _tbsnapshot_free(tbs->tbs_next); + tbs->tbs_next = NULL; + } +} + +static void +_tbsnapshot_free(_tbsnapshot *tbs) +{ + _tbsnapshot_clear(tbs); + PyMem_Free(tbs); +} + +static int +_tbsnapshot_is_clear(_tbsnapshot *tbs) +{ + return tbs->tbs_lineno == -1 && tbs->tbs_next == NULL + && _rawstring_is_clear(&tbs->tbs_funcname) + && _rawstring_is_clear(&tbs->tbs_filename); +} + +static int +_tbsnapshot_from_pytb(_tbsnapshot *tbs, PyTracebackObject *pytb) +{ + assert(_tbsnapshot_is_clear(tbs)); + assert(pytb != NULL); + + PyCodeObject *pycode = pytb->tb_frame->f_code; + const char *funcname = PyUnicode_AsUTF8(pycode->co_name); + if (_rawstring_strcpy(&tbs->tbs_funcname, funcname, 0) != 0) { + goto error; + } + const char *filename = PyUnicode_AsUTF8(pycode->co_filename); + if (_rawstring_strcpy(&tbs->tbs_filename, filename, 0) != 0) { + goto error; + } + tbs->tbs_lineno = pytb->tb_lineno; + + return 0; + +error: + _tbsnapshot_clear(tbs); + return -1; +} + +static int +_tbsnapshot_extract(_tbsnapshot *tbs, PyTracebackObject *pytb) +{ + assert(_tbsnapshot_is_clear(tbs)); + 
assert(pytb != NULL); + + _tbsnapshot *next = NULL; + while (pytb->tb_next != NULL) { + _tbsnapshot *_next = _tbsnapshot_new(); + if (_next == NULL) { + goto error; + } + if (_tbsnapshot_from_pytb(_next, pytb) != 0) { + goto error; + } + if (next != NULL) { + _next->tbs_next = next; + } + next = _next; + pytb = pytb->tb_next; + } + if (_tbsnapshot_from_pytb(tbs, pytb) != 0) { + goto error; + } + tbs->tbs_next = next; + + return 0; + +error: + _tbsnapshot_clear(tbs); + return -1; +} + +static PyObject * +_tbsnapshot_resolve(_tbsnapshot *tbs) +{ + assert(!PyErr_Occurred()); + // At this point there should be no traceback set yet. + + while (tbs != NULL) { + const char *funcname = tbs->tbs_funcname.data; + const char *filename = tbs->tbs_filename.data; + _PyTraceback_Add(funcname ? funcname : "", + filename ? filename : "", + tbs->tbs_lineno); + tbs = tbs->tbs_next; + } + + PyObject *exctype = NULL, *excval = NULL, *tb = NULL; + PyErr_Fetch(&exctype, &excval, &tb); + // Leave it cleared. + return tb; +} + +/* exception snapshots */ + +typedef struct _excsnapshot { + _objsnapshot es_object; + _rawstring *es_msg; + struct _excsnapshot *es_cause; + struct _excsnapshot *es_context; + char es_suppress_context; + struct _tbsnapshot *es_traceback; +} _excsnapshot; + +static void +_excsnapshot_init(_excsnapshot *es) +{ + _objsnapshot_init(&es->es_object); + es->es_msg = NULL; + es->es_cause = NULL; + es->es_context = NULL; + es->es_suppress_context = 0; + es->es_traceback = NULL; +} + +static _excsnapshot * +_excsnapshot_new(void) { + _excsnapshot *es = PyMem_NEW(_excsnapshot, 1); + if (es == NULL) { + PyErr_NoMemory(); + return NULL; + } + _excsnapshot_init(es); + return es; +} + +static void _excsnapshot_free(_excsnapshot *); // forward + +static void +_excsnapshot_clear(_excsnapshot *es) +{ + _objsnapshot_clear(&es->es_object); + if (es->es_msg != NULL) { + _rawstring_free(es->es_msg); + es->es_msg = NULL; + } + if (es->es_cause != NULL) { + 
_excsnapshot_free(es->es_cause); + es->es_cause = NULL; + } + if (es->es_context != NULL) { + _excsnapshot_free(es->es_context); + es->es_context = NULL; + } + es->es_suppress_context = 0; + if (es->es_traceback != NULL) { + _tbsnapshot_free(es->es_traceback); + es->es_traceback = NULL; + } +} + +static void +_excsnapshot_free(_excsnapshot *es) +{ + _excsnapshot_clear(es); + PyMem_Free(es); +} + +static int +_excsnapshot_is_clear(_excsnapshot *es) +{ + return es->es_suppress_context == 0 + && es->es_cause == NULL + && es->es_context == NULL + && es->es_traceback == NULL + && es->es_msg == NULL + && _objsnapshot_is_clear(&es->es_object); +} + +static PyObject * +_excsnapshot_get_exc_naive(_excsnapshot *es) +{ + _rawstring buf; + const char *msg = NULL; + if (es->es_msg != NULL) { + msg = es->es_msg->data; + } else { + _objsnapshot_summarize(&es->es_object, &buf, NULL); + if (buf.size > 0) { + msg = buf.data; + } + } + + PyObject *exc = NULL; + // XXX Use _objsnapshot_resolve_naive()? + const char *modname = es->es_object.modname.size > 0 + ? es->es_object.modname.data + : NULL; + PyObject *exctype = _pyobj_get_class(modname, es->es_object.clsname.data); + if (exctype != NULL) { + exc = _pyexc_create(exctype, msg, NULL); + Py_DECREF(exctype); + if (exc != NULL) { + return exc; + } + PyErr_Clear(); + } else { + PyErr_Clear(); + } + exctype = PyExc_Exception; + return _pyexc_create(exctype, msg, NULL); +} + +static PyObject * +_excsnapshot_get_exc(_excsnapshot *es) +{ + assert(!_objsnapshot_is_clear(&es->es_object)); + + PyObject *exc = _objsnapshot_resolve(&es->es_object); + if (exc == NULL) { + // Fall back to resolving the object. + PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL; + PyErr_Fetch(&curtype, &curexc, &curtb); + + exc = _excsnapshot_get_exc_naive(es); + if (exc == NULL) { + PyErr_Restore(curtype, curexc, curtb); + return NULL; + } + } + // People can do some weird stuff... + if (!PyExceptionInstance_Check(exc)) { + // We got a bogus "exception". 
+ Py_DECREF(exc); + PyErr_SetString(PyExc_TypeError, "expected exception"); + return NULL; + } + return exc; +} + +static void _excsnapshot_extract(_excsnapshot *, PyObject *); +static void +_excsnapshot_extract(_excsnapshot *es, PyObject *excobj) +{ + assert(_excsnapshot_is_clear(es)); + assert(PyExceptionInstance_Check(excobj)); + + _objsnapshot_extract(&es->es_object, excobj); + + es->es_msg = _objsnapshot_get_minimal_summary(&es->es_object, excobj); + if (es->es_msg == NULL) { + PyErr_Clear(); + } + + PyBaseExceptionObject *exc = (PyBaseExceptionObject *)excobj; + + if (exc->cause != NULL && exc->cause != Py_None) { + es->es_cause = _excsnapshot_new(); + _excsnapshot_extract(es->es_cause, exc->cause); + } + + if (exc->context != NULL && exc->context != Py_None) { + es->es_context = _excsnapshot_new(); + _excsnapshot_extract(es->es_context, exc->context); + } + + es->es_suppress_context = exc->suppress_context; + + PyObject *tb = PyException_GetTraceback(excobj); + if (PyErr_Occurred()) { + IGNORE_FAILURE("could not get traceback"); + } else if (tb == Py_None) { + Py_DECREF(tb); + tb = NULL; + } + if (tb != NULL) { + es->es_traceback = _tbsnapshot_new(); + if (_tbsnapshot_extract(es->es_traceback, + (PyTracebackObject *)tb) != 0) { + IGNORE_FAILURE("could not extract __traceback__"); + } + } +} + +static PyObject * +_excsnapshot_resolve(_excsnapshot *es) +{ + PyObject *exc = _excsnapshot_get_exc(es); + if (exc == NULL) { + return NULL; + } + + if (es->es_traceback != NULL) { + PyObject *tb = _tbsnapshot_resolve(es->es_traceback); + if (tb == NULL) { + // The snapshot is still somewhat useful without this. + IGNORE_FAILURE("could not deserialize traceback"); + } else { + // This does not steal references. + PyException_SetTraceback(exc, tb); + Py_DECREF(tb); + } + } + // NULL means "not set". + + if (es->es_context != NULL) { + PyObject *context = _excsnapshot_resolve(es->es_context); + if (context == NULL) { + // The snapshot is still useful without this. 
+ IGNORE_FAILURE("could not deserialize __context__"); + } else { + // This steals references but we have one to give. + PyException_SetContext(exc, context); + } + } + // NULL means "not set". + + if (es->es_cause != NULL) { + PyObject *cause = _excsnapshot_resolve(es->es_cause); + if (cause == NULL) { + // The snapshot is still useful without this. + IGNORE_FAILURE("could not deserialize __cause__"); + } else { + // This steals references, but we have one to give. + PyException_SetCause(exc, cause); + } + } + // NULL means "not set". + + ((PyBaseExceptionObject *)exc)->suppress_context = es->es_suppress_context; + + return exc; } /* data-sharing-specific code ***********************************************/ +/* shared "object" */ + struct _sharednsitem { - char *name; + _rawstring name; _PyCrossInterpreterData data; }; @@ -44,8 +929,7 @@ static void _sharednsitem_clear(struct _sharednsitem *); // forward static int _sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value) { - item->name = _copy_raw_string(key); - if (item->name == NULL) { + if (_rawstring_from_pyobj(&item->name, key) != 0) { return -1; } if (_PyObject_GetCrossInterpreterData(value, &item->data) != 0) { @@ -58,17 +942,14 @@ _sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value) static void _sharednsitem_clear(struct _sharednsitem *item) { - if (item->name != NULL) { - PyMem_Free(item->name); - item->name = NULL; - } + _rawstring_clear(&item->name); _PyCrossInterpreterData_Release(&item->data); } static int _sharednsitem_apply(struct _sharednsitem *item, PyObject *ns) { - PyObject *name = PyUnicode_FromString(item->name); + PyObject *name = PyUnicode_FromString(item->name.data); if (name == NULL) { return -1; } @@ -159,121 +1040,119 @@ _sharedns_apply(_sharedns *shared, PyObject *ns) return 0; } +/* shared exception */ + // Ultimately we'd like to preserve enough information about the // exception and traceback that we could re-constitute (or at least // 
simulate, a la traceback.TracebackException), and even chain, a copy // of the exception in the calling interpreter. typedef struct _sharedexception { - char *name; - char *msg; + _excsnapshot snapshot; + _rawstring msg; } _sharedexception; +static void +_sharedexception_init(_sharedexception *she) +{ + _excsnapshot_init(&she->snapshot); + _rawstring_init(&she->msg); +} + static _sharedexception * _sharedexception_new(void) { - _sharedexception *err = PyMem_NEW(_sharedexception, 1); - if (err == NULL) { + _sharedexception *she = PyMem_NEW(_sharedexception, 1); + if (she == NULL) { PyErr_NoMemory(); return NULL; } - err->name = NULL; - err->msg = NULL; - return err; + _sharedexception_init(she); + return she; } static void -_sharedexception_clear(_sharedexception *exc) +_sharedexception_clear(_sharedexception *she) { - if (exc->name != NULL) { - PyMem_Free(exc->name); - } - if (exc->msg != NULL) { - PyMem_Free(exc->msg); - } + _excsnapshot_clear(&she->snapshot); + _rawstring_clear(&she->msg); } static void -_sharedexception_free(_sharedexception *exc) +_sharedexception_free(_sharedexception *she) { - _sharedexception_clear(exc); - PyMem_Free(exc); + _sharedexception_clear(she); + PyMem_Free(she); } -static _sharedexception * -_sharedexception_bind(PyObject *exctype, PyObject *exc, PyObject *tb) +static int +_sharedexception_is_clear(_sharedexception *she) { - assert(exctype != NULL); - char *failure = NULL; - - _sharedexception *err = _sharedexception_new(); - if (err == NULL) { - goto finally; - } + return 1 + && _excsnapshot_is_clear(&she->snapshot) + && _rawstring_is_clear(&she->msg); +} - PyObject *name = PyUnicode_FromFormat("%S", exctype); - if (name == NULL) { - failure = "unable to format exception type name"; - goto finally; - } - err->name = _copy_raw_string(name); - Py_DECREF(name); - if (err->name == NULL) { - if (PyErr_ExceptionMatches(PyExc_MemoryError)) { - failure = "out of memory copying exception type name"; - } else { - failure = "unable to encode 
and copy exception type name"; +static PyObject * +_sharedexception_get_cause(_sharedexception *sharedexc) +{ + // FYI, "cause" is already normalized. + PyObject *cause = _excsnapshot_resolve(&sharedexc->snapshot); + if (cause == NULL) { + if (PyErr_Occurred()) { + IGNORE_FAILURE("could not deserialize exc snapshot"); } - goto finally; + return NULL; } + // XXX Ensure "cause" has a traceback. + return cause; +} - if (exc != NULL) { - PyObject *msg = PyUnicode_FromFormat("%S", exc); - if (msg == NULL) { - failure = "unable to format exception message"; - goto finally; - } - err->msg = _copy_raw_string(msg); - Py_DECREF(msg); - if (err->msg == NULL) { - if (PyErr_ExceptionMatches(PyExc_MemoryError)) { - failure = "out of memory copying exception message"; - } else { - failure = "unable to encode and copy exception message"; - } - goto finally; - } - } +static void +_sharedexception_extract(_sharedexception *she, PyObject *exc) +{ + assert(_sharedexception_is_clear(she)); + assert(exc != NULL); -finally: - if (failure != NULL) { - PyErr_Clear(); - if (err->name != NULL) { - PyMem_Free(err->name); - err->name = NULL; + _excsnapshot_extract(&she->snapshot, exc); + + // Compose the message. 
+ const char *msg = NULL; + PyObject *msgobj = PyUnicode_FromFormat("%S", exc); + if (msgobj == NULL) { + IGNORE_FAILURE("unable to format exception message"); + } else { + msg = PyUnicode_AsUTF8(msgobj); + if (PyErr_Occurred()) { + PyErr_Clear(); } - err->msg = failure; } - return err; + _objsnapshot_summarize(&she->snapshot.es_object, &she->msg, msg); + Py_XDECREF(msgobj); } -static void -_sharedexception_apply(_sharedexception *exc, PyObject *wrapperclass) +static PyObject * +_sharedexception_resolve(_sharedexception *sharedexc, PyObject *wrapperclass) { - if (exc->name != NULL) { - if (exc->msg != NULL) { - PyErr_Format(wrapperclass, "%s: %s", exc->name, exc->msg); - } - else { - PyErr_SetString(wrapperclass, exc->name); - } - } - else if (exc->msg != NULL) { - PyErr_SetString(wrapperclass, exc->msg); - } - else { - PyErr_SetNone(wrapperclass); + assert(!PyErr_Occurred()); + + // Get the exception object (already normalized). + PyObject *exc = _pyexc_create(wrapperclass, sharedexc->msg.data, NULL); + assert(exc != NULL); + + // Set __cause__, is possible. + PyObject *cause = _sharedexception_get_cause(sharedexc); + if (cause != NULL) { + // Set __context__. + Py_INCREF(cause); // PyException_SetContext() steals a reference. + PyException_SetContext(exc, cause); + + // Set __cause__. + Py_INCREF(cause); // PyException_SetCause() steals a reference. + PyException_SetCause(exc, cause); } + + return exc; } @@ -1869,11 +2748,9 @@ _ensure_not_running(PyInterpreterState *interp) static int _run_script(PyInterpreterState *interp, const char *codestr, - _sharedns *shared, _sharedexception **exc) + _sharedns *shared, _sharedexception **pexc) { - PyObject *exctype = NULL; - PyObject *excval = NULL; - PyObject *tb = NULL; + assert(!PyErr_Occurred()); // ...in the called interpreter. 
PyObject *main_mod = _PyInterpreterState_GetMainModule(interp); if (main_mod == NULL) { @@ -1904,25 +2781,38 @@ _run_script(PyInterpreterState *interp, const char *codestr, Py_DECREF(result); // We throw away the result. } - *exc = NULL; + *pexc = NULL; return 0; + PyObject *exctype = NULL, *exc = NULL, *tb = NULL; error: - PyErr_Fetch(&exctype, &excval, &tb); + PyErr_Fetch(&exctype, &exc, &tb); - _sharedexception *sharedexc = _sharedexception_bind(exctype, excval, tb); - Py_XDECREF(exctype); - Py_XDECREF(excval); - Py_XDECREF(tb); - if (sharedexc == NULL) { - fprintf(stderr, "RunFailedError: script raised an uncaught exception"); - PyErr_Clear(); - sharedexc = NULL; + // First normalize the exception. + PyErr_NormalizeException(&exctype, &exc, &tb); + assert(PyExceptionInstance_Check(exc)); + if (tb != NULL) { + PyException_SetTraceback(exc, tb); } - else { + + // Behave as though the exception was caught in this thread. + PyErr_SetExcInfo(exctype, exc, tb); // Like entering "except" block. + + // Serialize the exception. + _sharedexception *sharedexc = _sharedexception_new(); + if (sharedexc == NULL) { + IGNORE_FAILURE("script raised an uncaught exception"); + } else { + _sharedexception_extract(sharedexc, exc); assert(!PyErr_Occurred()); } - *exc = sharedexc; + + // Clear the exception. + PyErr_SetExcInfo(NULL, NULL, NULL); // Like leaving "except" block. + PyErr_Clear(); // Do not re-raise. + + // "Return" the serialized exception. + *pexc = sharedexc; return -1; } @@ -1930,6 +2820,8 @@ static int _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, PyObject *shareables) { + assert(!PyErr_Occurred()); // ...in the calling interpreter. + if (_ensure_not_running(interp) < 0) { return -1; } @@ -1963,8 +2855,8 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, } // Run the script. 
- _sharedexception *exc = NULL; - int result = _run_script(interp, codestr, shared, &exc); + _sharedexception *sharedexc = NULL; + int result = _run_script(interp, codestr, shared, &sharedexc); // Switch back. if (save_tstate != NULL) { @@ -1973,9 +2865,14 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, #endif // Propagate any exception out to the caller. - if (exc != NULL) { - _sharedexception_apply(exc, RunFailedError); - _sharedexception_free(exc); + if (sharedexc != NULL) { + assert(!PyErr_Occurred()); + PyObject *exc = _sharedexception_resolve(sharedexc, RunFailedError); + // XXX This is not safe once interpreters no longer share allocators. + _sharedexception_free(sharedexc); + PyObject *exctype = (PyObject *)Py_TYPE(exc); + Py_INCREF(exctype); // PyErr_Restore() steals a reference. + PyErr_Restore(exctype, exc, PyException_GetTraceback(exc)); } else if (result != 0) { // We were unable to allocate a shared exception. From c068b53a0ca6ebf740d98e422569d2f705e54f93 Mon Sep 17 00:00:00 2001 From: Hai Shi Date: Fri, 8 May 2020 01:16:01 +0800 Subject: [PATCH 039/115] bpo-38787: Update structures.rst docs (PEP 573) (GH-19980) --- Doc/c-api/structures.rst | 4 ++-- Modules/_testmultiphase.c | 16 ++++++---------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 72c94459295c41..ea97e1e715561f 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -150,7 +150,7 @@ Implementing functions and methods The function signature is:: PyObject *PyCFunction(PyObject *self, - PyObject *const *args); + PyObject *args); .. 
c:type:: PyCFunctionWithKeywords @@ -159,7 +159,7 @@ Implementing functions and methods The function signature is:: PyObject *PyCFunctionWithKeywords(PyObject *self, - PyObject *const *args, + PyObject *args, PyObject *kwargs); diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index 3084fc12a5ef51..d69ae628fa7a40 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -820,16 +820,12 @@ static PyModuleDef_Slot meth_state_access_slots[] = { }; static PyModuleDef def_meth_state_access = { - PyModuleDef_HEAD_INIT, /* m_base */ - "_testmultiphase_meth_state_access", /* m_name */ - PyDoc_STR("Module testing access" - " to state from methods."), - sizeof(meth_state), /* m_size */ - NULL, /* m_methods */ - meth_state_access_slots, /* m_slots */ - 0, /* m_traverse */ - 0, /* m_clear */ - 0, /* m_free */ + PyModuleDef_HEAD_INIT, + .m_name = "_testmultiphase_meth_state_access", + .m_doc = PyDoc_STR("Module testing access" + " to state from methods."), + .m_size = sizeof(meth_state), + .m_slots = meth_state_access_slots, }; PyMODINIT_FUNC From 4e363761fc02a89d53aba4382dc451293bd6f0ba Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 7 May 2020 22:42:14 +0200 Subject: [PATCH 040/115] bpo-40548: Always run GitHub action, even on doc PRs (GH-19981) Always run GitHub action jobs, even on documentation-only pull requests. So it will be possible to make a GitHub action job, like the Windows (64-bit) job, mandatory. --- .github/workflows/build.yml | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 50d1561518bd82..6e6a6d2b789d34 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,26 +1,19 @@ name: Tests +# bpo-40548: "paths-ignore" is not used to skip documentation-only PRs, because +# it prevents to mark a job as mandatory. A PR cannot be merged if a job is +# mandatory but not scheduled because of "paths-ignore". 
on: push: branches: - master - 3.8 - 3.7 - paths-ignore: - - 'Doc/**' - - 'Misc/**' - - '**/*.md' - - '**/*.rst' pull_request: branches: - master - 3.8 - 3.7 - paths-ignore: - - 'Doc/**' - - 'Misc/**' - - '**/*.md' - - '**/*.rst' jobs: build_win32: From b7a78ca74ab539943ab11b5c4c9cfab7f5b7ff5a Mon Sep 17 00:00:00 2001 From: Batuhan Taskaya Date: Thu, 7 May 2020 23:57:26 +0300 Subject: [PATCH 041/115] bpo-40517: Implement syntax highlighting support for ASDL (GH-19967) --- Doc/conf.py | 3 +- Doc/library/ast.rst | 2 +- Doc/tools/extensions/asdl_highlight.py | 51 ++++++++++++++++++++++++++ 3 files changed, 54 insertions(+), 2 deletions(-) create mode 100644 Doc/tools/extensions/asdl_highlight.py diff --git a/Doc/conf.py b/Doc/conf.py index 32db34344a70a1..12d74ea24ce4ac 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -14,7 +14,8 @@ # --------------------- extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest', - 'pyspecific', 'c_annotations', 'escape4chm'] + 'pyspecific', 'c_annotations', 'escape4chm', + 'asdl_highlight'] doctest_global_setup = ''' diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index fc04114949c0c3..6c6ad01b842c8e 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -35,7 +35,7 @@ Abstract Grammar The abstract grammar is currently defined as follows: .. 
literalinclude:: ../../Parser/Python.asdl - :language: none + :language: asdl Node classes diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/asdl_highlight.py new file mode 100644 index 00000000000000..7d2ef011c1b766 --- /dev/null +++ b/Doc/tools/extensions/asdl_highlight.py @@ -0,0 +1,51 @@ +import os +import sys +sys.path.append(os.path.abspath("../Parser/")) + +from pygments.lexer import RegexLexer, bygroups, include, words +from pygments.token import (Comment, Generic, Keyword, Name, Operator, + Punctuation, Text) + +from asdl import builtin_types +from sphinx.highlighting import lexers + +class ASDLLexer(RegexLexer): + name = "ASDL" + aliases = ["asdl"] + filenames = ["*.asdl"] + _name = r"([^\W\d]\w*)" + _text_ws = r"(\s*)" + + tokens = { + "ws": [ + (r"\n", Text), + (r"\s+", Text), + (r"--.*?$", Comment.Singleline), + ], + "root": [ + include("ws"), + ( + r"(module)" + _text_ws + _name, + bygroups(Keyword, Text, Name.Tag), + ), + ( + r"(\w+)(\*\s|\?\s|\s)(\w+)", + bygroups(Name.Builtin.Pseudo, Operator, Name), + ), + (words(builtin_types), Name.Builtin), + (r"attributes", Name.Builtin), + ( + _name + _text_ws + "(=)", + bygroups(Name, Text, Operator), + ), + (_name, Name.Class), + (r"\|", Operator), + (r"{|}|\(|\)", Punctuation), + (r".", Text), + ], + } + + +def setup(app): + lexers["asdl"] = ASDLLexer() + return {'version': '1.0', 'parallel_read_safe': True} From db9163ceef31ba00ccb23226917f9c8e9142a0b8 Mon Sep 17 00:00:00 2001 From: Pablo Galindo Date: Fri, 8 May 2020 03:38:44 +0100 Subject: [PATCH 042/115] bpo-40555: Check for p->error_indicator in loop rules after the main loop is done (GH-19986) --- Lib/test/test_eof.py | 9 ++++ Parser/pegen/parse.c | 54 ++++++++++++------------ Tools/peg_generator/pegen/c_generator.py | 2 +- 3 files changed, 37 insertions(+), 28 deletions(-) diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py index 9ef8eb1187486f..bebad3106119ec 100644 --- a/Lib/test/test_eof.py +++ b/Lib/test/test_eof.py 
@@ -26,6 +26,15 @@ def test_EOFS(self): else: raise support.TestFailed + def test_eof_with_line_continuation(self): + expect = "unexpected EOF while parsing (, line 1)" + try: + compile('"\\xhh" \\', '', 'exec', dont_inherit=True) + except SyntaxError as msg: + self.assertEqual(str(msg), expect) + else: + raise support.TestFailed + def test_line_continuation_EOF(self): """A continuation at the end of input must be an error; bpo2180.""" expect = 'unexpected EOF while parsing (, line 1)' diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 3a08abbca581c5..ae86841e8663b0 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -11437,7 +11437,7 @@ _loop1_11_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -11876,7 +11876,7 @@ _loop1_22_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -12252,7 +12252,7 @@ _loop1_31_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -12911,7 +12911,7 @@ _loop1_47_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13267,7 +13267,7 @@ _loop1_56_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13369,7 +13369,7 @@ _loop1_58_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13422,7 +13422,7 @@ _loop1_59_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13475,7 +13475,7 @@ _loop1_60_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13577,7 +13577,7 @@ _loop1_62_rule(Parser *p) } p->mark = 
mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13679,7 +13679,7 @@ _loop1_64_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13781,7 +13781,7 @@ _loop1_66_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -13834,7 +13834,7 @@ _loop1_67_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14007,7 +14007,7 @@ _loop1_71_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14145,7 +14145,7 @@ _loop1_74_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14345,7 +14345,7 @@ _loop1_78_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14447,7 +14447,7 @@ _loop1_80_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14500,7 +14500,7 @@ _loop1_81_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14553,7 +14553,7 @@ _loop1_82_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14655,7 +14655,7 @@ _loop1_84_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14757,7 +14757,7 @@ _loop1_86_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14859,7 +14859,7 @@ _loop1_88_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { 
PyMem_Free(children); return NULL; } @@ -14912,7 +14912,7 @@ _loop1_89_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -14965,7 +14965,7 @@ _loop1_90_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -15018,7 +15018,7 @@ _loop1_91_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -15358,7 +15358,7 @@ _loop1_99_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -15567,7 +15567,7 @@ _loop1_104_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } @@ -17165,7 +17165,7 @@ _loop1_145_rule(Parser *p) } p->mark = mark; } - if (n == 0) { + if (n == 0 || p->error_indicator) { PyMem_Free(children); return NULL; } diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 3bf6d9ed6a3abb..b7a9942c2fdd27 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -525,7 +525,7 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: rulename=node.name if memoize else None, ) if is_repeat1: - self.print("if (n == 0) {") + self.print("if (n == 0 || p->error_indicator) {") with self.indent(): self.print("PyMem_Free(children);") self.print("return NULL;") From 02fa0ea9c1073e4476c9bde3d7112f5dd964aa57 Mon Sep 17 00:00:00 2001 From: Zackery Spytz Date: Thu, 7 May 2020 23:25:50 -0600 Subject: [PATCH 043/115] bpo-40273: Reversible mappingproxy (FH-19513) --- Doc/library/types.rst | 6 ++++++ Lib/test/test_types.py | 9 +++++++++ .../Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst | 1 + Objects/descrobject.c | 9 +++++++++ 4 files changed, 25 insertions(+) create mode 100644 
Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 4cb91c1a90bcfc..1d081e2c54868d 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -329,6 +329,12 @@ Standard names are defined for the following types: Return a new view of the underlying mapping's values. + .. describe:: reversed(proxy) + + Return a reverse iterator over the keys of the underlying mapping. + + .. versionadded:: 3.9 + Additional Utility Classes and Functions ---------------------------------------- diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index f42238762ddcca..28ebfb6e603e36 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -627,6 +627,7 @@ def test_methods(self): '__iter__', '__len__', '__or__', + '__reversed__', '__ror__', 'copy', 'get', @@ -768,6 +769,14 @@ def test_iterators(self): self.assertEqual(set(view.values()), set(values)) self.assertEqual(set(view.items()), set(items)) + def test_reversed(self): + d = {'a': 1, 'b': 2, 'foo': 0, 'c': 3, 'd': 4} + mp = self.mappingproxy(d) + del d['foo'] + r = reversed(mp) + self.assertEqual(list(r), list('dcba')) + self.assertRaises(StopIteration, next, r) + def test_copy(self): original = {'key1': 27, 'key2': 51, 'key3': 93} view = self.mappingproxy(original) diff --git a/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst b/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst new file mode 100644 index 00000000000000..50f547f56c5208 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst @@ -0,0 +1 @@ +:class:`types.MappingProxyType` is now reversible. 
diff --git a/Objects/descrobject.c b/Objects/descrobject.c index c9754a11b89be1..c29cf7a4c44640 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -1118,6 +1118,13 @@ mappingproxy_copy(mappingproxyobject *pp, PyObject *Py_UNUSED(ignored)) return _PyObject_CallMethodIdNoArgs(pp->mapping, &PyId_copy); } +static PyObject * +mappingproxy_reversed(mappingproxyobject *pp, PyObject *Py_UNUSED(ignored)) +{ + _Py_IDENTIFIER(__reversed__); + return _PyObject_CallMethodIdNoArgs(pp->mapping, &PyId___reversed__); +} + /* WARNING: mappingproxy methods must not give access to the underlying mapping */ @@ -1135,6 +1142,8 @@ static PyMethodDef mappingproxy_methods[] = { PyDoc_STR("D.copy() -> a shallow copy of D")}, {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, + {"__reversed__", (PyCFunction)mappingproxy_reversed, METH_NOARGS, + PyDoc_STR("D.__reversed__() -> reverse iterator")}, {0} }; From d2c349b190bcba21a4a38e6520a48ad97a9f1529 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Fri, 8 May 2020 03:54:38 -0700 Subject: [PATCH 044/115] bpo-40559: Add Py_DECREF to _asynciomodule.c:task_step_impl() (GH-19990) This fixes a possible memory leak in the C implementation of asyncio.Task. --- .../next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst | 1 + Modules/_asynciomodule.c | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst diff --git a/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst new file mode 100644 index 00000000000000..15846351f25bbe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst @@ -0,0 +1 @@ +Fix possible memory leak in the C implementation of :class:`asyncio.Task`. 
\ No newline at end of file diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index a03a63119bab3d..cc211a8895a8e8 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -2638,6 +2638,10 @@ task_step_impl(TaskObj *task, PyObject *exc) coro = task->task_coro; if (coro == NULL) { PyErr_SetString(PyExc_RuntimeError, "uninitialized Task object"); + if (clear_exc) { + /* We created 'exc' during this call */ + Py_DECREF(exc); + } return NULL; } From 2effef7453986bf43a6d921cd471a8bc0722c36a Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 8 May 2020 07:39:57 -0400 Subject: [PATCH 045/115] Make the first dataclass example more useful (GH-19994) --- Doc/library/dataclasses.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index c125a1130a9603..fe63d20671dd74 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -19,6 +19,8 @@ in :pep:`557`. The member variables to use in these generated methods are defined using :pep:`526` type annotations. For example this code:: + from dataclasses import dataclass + @dataclass class InventoryItem: '''Class for keeping track of an item in inventory.''' From 81a5fc38e81b424869f4710f48e9371dfa2d3b77 Mon Sep 17 00:00:00 2001 From: Raymond Hettinger Date: Fri, 8 May 2020 07:53:15 -0700 Subject: [PATCH 046/115] bpo-40541: Add optional *counts* parameter to random.sample() (GH-19970) --- Doc/library/random.rst | 21 ++++-- Lib/random.py | 34 +++++++-- Lib/test/test_random.py | 73 ++++++++++++++++++- .../2020-05-06-15-36-47.bpo-40541.LlYghL.rst | 1 + 4 files changed, 116 insertions(+), 13 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst diff --git a/Doc/library/random.rst b/Doc/library/random.rst index f37bc2a111d954..90366f499cae6a 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -217,7 +217,7 @@ Functions for sequences The optional parameter *random*. 
-.. function:: sample(population, k) +.. function:: sample(population, k, *, counts=None) Return a *k* length list of unique elements chosen from the population sequence or set. Used for random sampling without replacement. @@ -231,6 +231,11 @@ Functions for sequences Members of the population need not be :term:`hashable` or unique. If the population contains repeats, then each occurrence is a possible selection in the sample. + Repeated elements can be specified one at a time or with the optional + keyword-only *counts* parameter. For example, ``sample(['red', 'blue'], + counts=[4, 2], k=5)`` is equivalent to ``sample(['red', 'red', 'red', 'red', + 'blue', 'blue'], k=5)``. + To choose a sample from a range of integers, use a :func:`range` object as an argument. This is especially fast and space efficient for sampling from a large population: ``sample(range(10000000), k=60)``. @@ -238,6 +243,9 @@ Functions for sequences If the sample size is larger than the population size, a :exc:`ValueError` is raised. + .. versionchanged:: 3.9 + Added the *counts* parameter. + .. deprecated:: 3.9 In the future, the *population* must be a sequence. Instances of :class:`set` are no longer supported. The set must first be converted @@ -420,12 +428,11 @@ Simulations:: >>> choices(['red', 'black', 'green'], [18, 18, 2], k=6) ['red', 'green', 'black', 'black', 'red', 'black'] - >>> # Deal 20 cards without replacement from a deck of 52 playing cards - >>> # and determine the proportion of cards with a ten-value - >>> # (a ten, jack, queen, or king). - >>> deck = collections.Counter(tens=16, low_cards=36) - >>> seen = sample(list(deck.elements()), k=20) - >>> seen.count('tens') / 20 + >>> # Deal 20 cards without replacement from a deck + >>> # of 52 playing cards, and determine the proportion of cards + >>> # with a ten-value: ten, jack, queen, or king. 
+ >>> dealt = sample(['tens', 'low cards'], counts=[16, 36], k=20) + >>> dealt.count('tens') / 20 0.15 >>> # Estimate the probability of getting 5 or more heads from 7 spins diff --git a/Lib/random.py b/Lib/random.py index f2c4f39fb6079d..75f70d5d699ed9 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -331,7 +331,7 @@ def shuffle(self, x, random=None): j = _int(random() * (i+1)) x[i], x[j] = x[j], x[i] - def sample(self, population, k): + def sample(self, population, k, *, counts=None): """Chooses k unique random elements from a population sequence or set. Returns a new list containing elements from the population while @@ -344,9 +344,21 @@ def sample(self, population, k): population contains repeats, then each occurrence is a possible selection in the sample. - To choose a sample in a range of integers, use range as an argument. - This is especially fast and space efficient for sampling from a - large population: sample(range(10000000), 60) + Repeated elements can be specified one at a time or with the optional + counts parameter. For example: + + sample(['red', 'blue'], counts=[4, 2], k=5) + + is equivalent to: + + sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) + + To choose a sample from a range of integers, use range() for the + population argument. This is especially fast and space efficient + for sampling from a large population: + + sample(range(10000000), 60) + """ # Sampling without replacement entails tracking either potential @@ -379,8 +391,20 @@ def sample(self, population, k): population = tuple(population) if not isinstance(population, _Sequence): raise TypeError("Population must be a sequence. 
For dicts or sets, use sorted(d).") - randbelow = self._randbelow n = len(population) + if counts is not None: + cum_counts = list(_accumulate(counts)) + if len(cum_counts) != n: + raise ValueError('The number of counts does not match the population') + total = cum_counts.pop() + if not isinstance(total, int): + raise TypeError('Counts must be integers') + if total <= 0: + raise ValueError('Total of counts must be greater than zero') + selections = sample(range(total), k=k) + bisect = _bisect + return [population[bisect(cum_counts, s)] for s in selections] + randbelow = self._randbelow if not 0 <= k <= n: raise ValueError("Sample larger than population or is negative") result = [None] * k diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py index bb95ca0884a516..a3710f4aa48a68 100644 --- a/Lib/test/test_random.py +++ b/Lib/test/test_random.py @@ -9,7 +9,7 @@ from math import log, exp, pi, fsum, sin, factorial from test import support from fractions import Fraction - +from collections import Counter class TestBasicOps: # Superclass with tests common to all generators. 
@@ -161,6 +161,77 @@ def test_sample_on_sets(self): population = {10, 20, 30, 40, 50, 60, 70} self.gen.sample(population, k=5) + def test_sample_with_counts(self): + sample = self.gen.sample + + # General case + colors = ['red', 'green', 'blue', 'orange', 'black', 'brown', 'amber'] + counts = [500, 200, 20, 10, 5, 0, 1 ] + k = 700 + summary = Counter(sample(colors, counts=counts, k=k)) + self.assertEqual(sum(summary.values()), k) + for color, weight in zip(colors, counts): + self.assertLessEqual(summary[color], weight) + self.assertNotIn('brown', summary) + + # Case that exhausts the population + k = sum(counts) + summary = Counter(sample(colors, counts=counts, k=k)) + self.assertEqual(sum(summary.values()), k) + for color, weight in zip(colors, counts): + self.assertLessEqual(summary[color], weight) + self.assertNotIn('brown', summary) + + # Case with population size of 1 + summary = Counter(sample(['x'], counts=[10], k=8)) + self.assertEqual(summary, Counter(x=8)) + + # Case with all counts equal. + nc = len(colors) + summary = Counter(sample(colors, counts=[10]*nc, k=10*nc)) + self.assertEqual(summary, Counter(10*colors)) + + # Test error handling + with self.assertRaises(TypeError): + sample(['red', 'green', 'blue'], counts=10, k=10) # counts not iterable + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[-3, -7, -8], k=2) # counts are negative + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[0, 0, 0], k=2) # counts are zero + with self.assertRaises(ValueError): + sample(['red', 'green'], counts=[10, 10], k=21) # population too small + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[1, 2], k=2) # too few counts + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[1, 2, 3, 4], k=2) # too many counts + + def test_sample_counts_equivalence(self): + # Test the documented strong equivalence to a sample with repeated elements. 
+ # We run this test on random.Random() which makes deterministic selections + # for a given seed value. + sample = random.sample + seed = random.seed + + colors = ['red', 'green', 'blue', 'orange', 'black', 'amber'] + counts = [500, 200, 20, 10, 5, 1 ] + k = 700 + seed(8675309) + s1 = sample(colors, counts=counts, k=k) + seed(8675309) + expanded = [color for (color, count) in zip(colors, counts) for i in range(count)] + self.assertEqual(len(expanded), sum(counts)) + s2 = sample(expanded, k=k) + self.assertEqual(s1, s2) + + pop = 'abcdefghi' + counts = [10, 9, 8, 7, 6, 5, 4, 3, 2] + seed(8675309) + s1 = ''.join(sample(pop, counts=counts, k=30)) + expanded = ''.join([letter for (letter, count) in zip(pop, counts) for i in range(count)]) + seed(8675309) + s2 = ''.join(sample(expanded, k=30)) + self.assertEqual(s1, s2) + def test_choices(self): choices = self.gen.choices data = ['red', 'green', 'blue', 'yellow'] diff --git a/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst b/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst new file mode 100644 index 00000000000000..a2e694ac1ad080 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst @@ -0,0 +1 @@ +Added an optional *counts* parameter to random.sample(). From d10091aa171250c67a5079abfe26b8b3964ea39a Mon Sep 17 00:00:00 2001 From: Joannah Nanjekye <33177550+nanjekyejoannah@users.noreply.github.com> Date: Fri, 8 May 2020 17:58:28 -0300 Subject: [PATCH 047/115] bpo-40502: Initialize n->n_col_offset (GH-19988) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * initialize n->n_col_offset * 📜🤖 Added by blurb_it. 
* Move initialization Co-authored-by: nanjekyejoannah Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> --- .../Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst | 2 ++ Parser/node.c | 1 + 2 files changed, 3 insertions(+) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst new file mode 100644 index 00000000000000..b0ea60234634c5 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst @@ -0,0 +1,2 @@ +Initialize ``n->n_col_offset``. +(Patch by Joannah Nanjekye) \ No newline at end of file diff --git a/Parser/node.c b/Parser/node.c index f1b70e0f6815be..8789e01e9b848c 100644 --- a/Parser/node.c +++ b/Parser/node.c @@ -14,6 +14,7 @@ PyNode_New(int type) n->n_str = NULL; n->n_lineno = 0; n->n_end_lineno = 0; + n->n_col_offset = 0; n->n_end_col_offset = -1; n->n_nchildren = 0; n->n_child = NULL; From 7f7e706d78ab968a1221c6179dfdba714860bd12 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 8 May 2020 19:20:26 -0400 Subject: [PATCH 048/115] bpo-39791: Add files() to importlib.resources (GH-19722) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * bpo-39791: Update importlib.resources to support files() API (importlib_resources 1.5). * 📜🤖 Added by blurb_it. * Add some documentation about the new objects added. 
Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> --- Doc/library/importlib.rst | 37 ++++ Lib/importlib/_common.py | 72 ++++++++ Lib/importlib/abc.py | 86 ++++++++++ Lib/importlib/resources.py | 161 +++++++----------- Lib/test/test_importlib/test_files.py | 39 +++++ Lib/test/test_importlib/test_path.py | 1 + .../2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst | 1 + 7 files changed, 295 insertions(+), 102 deletions(-) create mode 100644 Lib/importlib/_common.py create mode 100644 Lib/test/test_importlib/test_files.py create mode 100644 Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index a612b1e1455a0c..99bfeacbbc7407 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -480,6 +480,8 @@ ABC hierarchy:: .. class:: ResourceReader + *Superseded by TraversableReader* + An :term:`abstract base class` to provide the ability to read *resources*. @@ -795,6 +797,28 @@ ABC hierarchy:: itself does not end in ``__init__``. +.. class:: Traversable + + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + + .. versionadded:: 3.9 + + +.. class:: TraversableReader + + An abstract base class for resource readers capable of serving + the ``files`` interface. Subclasses ResourceReader and provides + concrete implementations of the ResourceReader's abstract + methods. Therefore, any loader supplying TraversableReader + also supplies ResourceReader. + + Loaders that wish to support resource reading are expected to + implement this interface. + + .. versionadded:: 3.9 + + :mod:`importlib.resources` -- Resources --------------------------------------- @@ -853,6 +877,19 @@ The following types are defined. The following functions are available. + +.. 
function:: files(package) + + Returns an :class:`importlib.resources.abc.Traversable` object + representing the resource container for the package (think directory) + and its resources (think files). A Traversable may contain other + containers (think subdirectories). + + *package* is either a name or a module object which conforms to the + ``Package`` requirements. + + .. versionadded:: 3.9 + .. function:: open_binary(package, resource) Open for binary reading the *resource* within *package*. diff --git a/Lib/importlib/_common.py b/Lib/importlib/_common.py new file mode 100644 index 00000000000000..ba7cbac3c9bfda --- /dev/null +++ b/Lib/importlib/_common.py @@ -0,0 +1,72 @@ +import os +import pathlib +import zipfile +import tempfile +import functools +import contextlib + + +def from_package(package): + """ + Return a Traversable object for the given package. + + """ + spec = package.__spec__ + return from_traversable_resources(spec) or fallback_resources(spec) + + +def from_traversable_resources(spec): + """ + If the spec.loader implements TraversableResources, + directly or implicitly, it will have a ``files()`` method. + """ + with contextlib.suppress(AttributeError): + return spec.loader.files() + + +def fallback_resources(spec): + package_directory = pathlib.Path(spec.origin).parent + try: + archive_path = spec.loader.archive + rel_path = package_directory.relative_to(archive_path) + return zipfile.Path(archive_path, str(rel_path) + '/') + except Exception: + pass + return package_directory + + +@contextlib.contextmanager +def _tempfile(reader, suffix=''): + # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' + # blocks due to the need to close the temporary file to work on Windows + # properly. 
+ fd, raw_path = tempfile.mkstemp(suffix=suffix) + try: + os.write(fd, reader()) + os.close(fd) + yield pathlib.Path(raw_path) + finally: + try: + os.remove(raw_path) + except FileNotFoundError: + pass + + +@functools.singledispatch +@contextlib.contextmanager +def as_file(path): + """ + Given a Traversable object, return that object as a + path on the local file system in a context manager. + """ + with _tempfile(path.read_bytes, suffix=path.name) as local: + yield local + + +@as_file.register(pathlib.Path) +@contextlib.contextmanager +def _(path): + """ + Degenerate behavior for pathlib.Path objects. + """ + yield path diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index b1b5ccce4bd35d..b8a9bb1a21ef77 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -14,6 +14,7 @@ _frozen_importlib_external = _bootstrap_external import abc import warnings +from typing import Protocol, runtime_checkable def _register(abstract_cls, *classes): @@ -386,3 +387,88 @@ def contents(self): _register(ResourceReader, machinery.SourceFileLoader) + + +@runtime_checkable +class Traversable(Protocol): + """ + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. 
+ """ + + @abc.abstractmethod + def iterdir(self): + """ + Yield Traversable objects in self + """ + + @abc.abstractmethod + def read_bytes(self): + """ + Read contents of self as bytes + """ + + @abc.abstractmethod + def read_text(self, encoding=None): + """ + Read contents of self as bytes + """ + + @abc.abstractmethod + def is_dir(self): + """ + Return True if self is a dir + """ + + @abc.abstractmethod + def is_file(self): + """ + Return True if self is a file + """ + + @abc.abstractmethod + def joinpath(self, child): + """ + Return Traversable child in self + """ + + @abc.abstractmethod + def __truediv__(self, child): + """ + Return Traversable child in self + """ + + @abc.abstractmethod + def open(self, mode='r', *args, **kwargs): + """ + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ + + @abc.abstractproperty + def name(self): + # type: () -> str + """ + The base name of this object without any parent references. + """ + + +class TraversableResources(ResourceReader): + @abc.abstractmethod + def files(self): + """Return a Traversable object for the loaded package.""" + + def open_resource(self, resource): + return self.files().joinpath(resource).open('rb') + + def resource_path(self, resource): + raise FileNotFoundError(resource) + + def is_resource(self, path): + return self.files().joinpath(path).isfile() + + def contents(self): + return (item.name for item in self.files().iterdir()) diff --git a/Lib/importlib/resources.py b/Lib/importlib/resources.py index f51886557466c7..b803a01c91d652 100644 --- a/Lib/importlib/resources.py +++ b/Lib/importlib/resources.py @@ -1,14 +1,15 @@ import os -import tempfile from . import abc as resources_abc +from . 
import _common +from ._common import as_file from contextlib import contextmanager, suppress from importlib import import_module from importlib.abc import ResourceLoader from io import BytesIO, TextIOWrapper from pathlib import Path from types import ModuleType -from typing import Iterable, Iterator, Optional, Union # noqa: F401 +from typing import ContextManager, Iterable, Optional, Union from typing import cast from typing.io import BinaryIO, TextIO @@ -16,7 +17,9 @@ __all__ = [ 'Package', 'Resource', + 'as_file', 'contents', + 'files', 'is_resource', 'open_binary', 'open_text', @@ -30,24 +33,23 @@ Resource = Union[str, os.PathLike] +def _resolve(name) -> ModuleType: + """If name is a string, resolve to a module.""" + if hasattr(name, '__spec__'): + return name + return import_module(name) + + def _get_package(package) -> ModuleType: """Take a package name or module object and return the module. - If a name, the module is imported. If the passed or imported module + If a name, the module is imported. If the resolved module object is not a package, raise an exception. 
""" - if hasattr(package, '__spec__'): - if package.__spec__.submodule_search_locations is None: - raise TypeError('{!r} is not a package'.format( - package.__spec__.name)) - else: - return package - else: - module = import_module(package) - if module.__spec__.submodule_search_locations is None: - raise TypeError('{!r} is not a package'.format(package)) - else: - return module + module = _resolve(package) + if module.__spec__.submodule_search_locations is None: + raise TypeError('{!r} is not a package'.format(package)) + return module def _normalize_path(path) -> str: @@ -58,8 +60,7 @@ def _normalize_path(path) -> str: parent, file_name = os.path.split(path) if parent: raise ValueError('{!r} must be only a file name'.format(path)) - else: - return file_name + return file_name def _get_resource_reader( @@ -88,8 +89,8 @@ def open_binary(package: Package, resource: Resource) -> BinaryIO: reader = _get_resource_reader(package) if reader is not None: return reader.open_resource(resource) - _check_location(package) - absolute_package_path = os.path.abspath(package.__spec__.origin) + absolute_package_path = os.path.abspath( + package.__spec__.origin or 'non-existent file') package_path = os.path.dirname(absolute_package_path) full_path = os.path.join(package_path, resource) try: @@ -108,8 +109,7 @@ def open_binary(package: Package, resource: Resource) -> BinaryIO: message = '{!r} resource not found in {!r}'.format( resource, package_name) raise FileNotFoundError(message) - else: - return BytesIO(data) + return BytesIO(data) def open_text(package: Package, @@ -117,39 +117,12 @@ def open_text(package: Package, encoding: str = 'utf-8', errors: str = 'strict') -> TextIO: """Return a file-like object opened for text reading of the resource.""" - resource = _normalize_path(resource) - package = _get_package(package) - reader = _get_resource_reader(package) - if reader is not None: - return TextIOWrapper(reader.open_resource(resource), encoding, errors) - 
_check_location(package) - absolute_package_path = os.path.abspath(package.__spec__.origin) - package_path = os.path.dirname(absolute_package_path) - full_path = os.path.join(package_path, resource) - try: - return open(full_path, mode='r', encoding=encoding, errors=errors) - except OSError: - # Just assume the loader is a resource loader; all the relevant - # importlib.machinery loaders are and an AttributeError for - # get_data() will make it clear what is needed from the loader. - loader = cast(ResourceLoader, package.__spec__.loader) - data = None - if hasattr(package.__spec__.loader, 'get_data'): - with suppress(OSError): - data = loader.get_data(full_path) - if data is None: - package_name = package.__spec__.name - message = '{!r} resource not found in {!r}'.format( - resource, package_name) - raise FileNotFoundError(message) - else: - return TextIOWrapper(BytesIO(data), encoding, errors) + return TextIOWrapper( + open_binary(package, resource), encoding=encoding, errors=errors) def read_binary(package: Package, resource: Resource) -> bytes: """Return the binary contents of the resource.""" - resource = _normalize_path(resource) - package = _get_package(package) with open_binary(package, resource) as fp: return fp.read() @@ -163,14 +136,20 @@ def read_text(package: Package, The decoding-related arguments have the same semantics as those of bytes.decode(). """ - resource = _normalize_path(resource) - package = _get_package(package) with open_text(package, resource, encoding, errors) as fp: return fp.read() -@contextmanager -def path(package: Package, resource: Resource) -> Iterator[Path]: +def files(package: Package) -> resources_abc.Traversable: + """ + Get a Traversable resource from a package + """ + return _common.from_package(_get_package(package)) + + +def path( + package: Package, resource: Resource, + ) -> 'ContextManager[Path]': """A context manager providing a file path object to the resource. 
If the resource does not already exist on its own on the file system, @@ -179,39 +158,23 @@ def path(package: Package, resource: Resource) -> Iterator[Path]: raised if the file was deleted prior to the context manager exiting). """ - resource = _normalize_path(resource) - package = _get_package(package) - reader = _get_resource_reader(package) - if reader is not None: - try: - yield Path(reader.resource_path(resource)) - return - except FileNotFoundError: - pass - else: - _check_location(package) - # Fall-through for both the lack of resource_path() *and* if - # resource_path() raises FileNotFoundError. - package_directory = Path(package.__spec__.origin).parent - file_path = package_directory / resource - if file_path.exists(): - yield file_path - else: - with open_binary(package, resource) as fp: - data = fp.read() - # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' - # blocks due to the need to close the temporary file to work on - # Windows properly. - fd, raw_path = tempfile.mkstemp() - try: - os.write(fd, data) - os.close(fd) - yield Path(raw_path) - finally: - try: - os.remove(raw_path) - except FileNotFoundError: - pass + reader = _get_resource_reader(_get_package(package)) + return ( + _path_from_reader(reader, resource) + if reader else + _common.as_file(files(package).joinpath(_normalize_path(resource))) + ) + + +@contextmanager +def _path_from_reader(reader, resource): + norm_resource = _normalize_path(resource) + with suppress(FileNotFoundError): + yield Path(reader.resource_path(norm_resource)) + return + opener_reader = reader.open_resource(norm_resource) + with _common._tempfile(opener_reader.read, suffix=norm_resource) as res: + yield res def is_resource(package: Package, name: str) -> bool: @@ -224,17 +187,10 @@ def is_resource(package: Package, name: str) -> bool: reader = _get_resource_reader(package) if reader is not None: return reader.is_resource(name) - try: - package_contents = set(contents(package)) - except 
(NotADirectoryError, FileNotFoundError): - return False + package_contents = set(contents(package)) if name not in package_contents: return False - # Just because the given file_name lives as an entry in the package's - # contents doesn't necessarily mean it's a resource. Directories are not - # resources, so let's try to find out if it's a directory or not. - path = Path(package.__spec__.origin).parent / name - return path.is_file() + return (_common.from_package(package) / name).is_file() def contents(package: Package) -> Iterable[str]: @@ -249,10 +205,11 @@ def contents(package: Package) -> Iterable[str]: if reader is not None: return reader.contents() # Is the package a namespace package? By definition, namespace packages - # cannot have resources. We could use _check_location() and catch the - # exception, but that's extra work, so just inline the check. - elif package.__spec__.origin is None or not package.__spec__.has_location: + # cannot have resources. + namespace = ( + package.__spec__.origin is None or + package.__spec__.origin == 'namespace' + ) + if namespace or not package.__spec__.has_location: return () - else: - package_directory = Path(package.__spec__.origin).parent - return os.listdir(package_directory) + return list(item.name for item in _common.from_package(package).iterdir()) diff --git a/Lib/test/test_importlib/test_files.py b/Lib/test/test_importlib/test_files.py new file mode 100644 index 00000000000000..fa7af82bf0c28b --- /dev/null +++ b/Lib/test/test_importlib/test_files.py @@ -0,0 +1,39 @@ +import typing +import unittest + +from importlib import resources +from importlib.abc import Traversable +from . import data01 +from . 
import util + + +class FilesTests: + def test_read_bytes(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_bytes() + assert actual == b'Hello, UTF-8 world!\n' + + def test_read_text(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_text() + assert actual == 'Hello, UTF-8 world!\n' + + @unittest.skipUnless( + hasattr(typing, 'runtime_checkable'), + "Only suitable when typing supports runtime_checkable", + ) + def test_traversable(self): + assert isinstance(resources.files(self.data), Traversable) + + +class OpenDiskTests(FilesTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/Lib/test/test_importlib/test_path.py b/Lib/test/test_importlib/test_path.py index 2d3dcda7ed2e79..c4e7285411322c 100644 --- a/Lib/test/test_importlib/test_path.py +++ b/Lib/test/test_importlib/test_path.py @@ -17,6 +17,7 @@ def test_reading(self): # Test also implicitly verifies the returned object is a pathlib.Path # instance. with resources.path(self.data, 'utf-8.file') as path: + self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) # pathlib.Path.read_text() was introduced in Python 3.5. with path.open('r', encoding='utf-8') as file: text = file.read() diff --git a/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst b/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst new file mode 100644 index 00000000000000..237bcf7f99b0f0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst @@ -0,0 +1 @@ +Added ``files()`` function to importlib.resources with support for subdirectories in package data, matching backport in importlib_resources 1.5. 
\ No newline at end of file From 77c614624b6bf2145bef69830d0f499d8b55ec0c Mon Sep 17 00:00:00 2001 From: Dong-hee Na Date: Sat, 9 May 2020 17:31:40 +0900 Subject: [PATCH 049/115] bpo-40566: Apply PEP 573 to abc module (GH-20005) --- .../2020-05-09-01-39-16.bpo-40566.wlcjW_.rst | 1 + Modules/_abc.c | 34 +++++++++++-------- 2 files changed, 20 insertions(+), 15 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst new file mode 100644 index 00000000000000..92a5e3ce632172 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst @@ -0,0 +1 @@ +Apply :pep:`573` to :mod:`abc`. diff --git a/Modules/_abc.c b/Modules/_abc.c index 7c040ef80ba3da..434bc454175b56 100644 --- a/Modules/_abc.c +++ b/Modules/_abc.c @@ -21,16 +21,9 @@ _Py_IDENTIFIER(__subclasshook__); typedef struct { PyTypeObject *_abc_data_type; + unsigned long long abc_invalidation_counter; } _abcmodule_state; -/* A global counter that is incremented each time a class is - registered as a virtual subclass of anything. It forces the - negative cache to be cleared before its next use. - Note: this counter is private. Use `abc.get_cache_token()` for - external code. */ -// FIXME: PEP 573: Move abc_invalidation_counter into _abcmodule_state. 
-static unsigned long long abc_invalidation_counter = 0; - static inline _abcmodule_state* get_abc_state(PyObject *module) { @@ -81,14 +74,21 @@ static PyObject * abc_data_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { _abc_data *self = (_abc_data *) type->tp_alloc(type, 0); + _abcmodule_state *state = NULL; if (self == NULL) { return NULL; } + state = PyType_GetModuleState(type); + if (state == NULL) { + Py_DECREF(self); + return NULL; + } + self->_abc_registry = NULL; self->_abc_cache = NULL; self->_abc_negative_cache = NULL; - self->_abc_negative_cache_version = abc_invalidation_counter; + self->_abc_negative_cache_version = state->abc_invalidation_counter; return (PyObject *) self; } @@ -495,7 +495,7 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass) Py_DECREF(impl); /* Invalidate negative cache */ - abc_invalidation_counter++; + get_abc_state(module)->abc_invalidation_counter++; Py_INCREF(subclass); return subclass; @@ -540,7 +540,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self, } subtype = (PyObject *)Py_TYPE(instance); if (subtype == subclass) { - if (impl->_abc_negative_cache_version == abc_invalidation_counter) { + if (impl->_abc_negative_cache_version == get_abc_state(module)->abc_invalidation_counter) { incache = _in_weak_set(impl->_abc_negative_cache, subclass); if (incache < 0) { goto end; @@ -612,6 +612,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self, } PyObject *ok, *subclasses = NULL, *result = NULL; + _abcmodule_state *state = NULL; Py_ssize_t pos; int incache; _abc_data *impl = _get_impl(module, self); @@ -629,15 +630,16 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self, goto end; } + state = get_abc_state(module); /* 2. Check negative cache; may have to invalidate. */ - if (impl->_abc_negative_cache_version < abc_invalidation_counter) { + if (impl->_abc_negative_cache_version < state->abc_invalidation_counter) { /* Invalidate the negative cache. 
*/ if (impl->_abc_negative_cache != NULL && PySet_Clear(impl->_abc_negative_cache) < 0) { goto end; } - impl->_abc_negative_cache_version = abc_invalidation_counter; + impl->_abc_negative_cache_version = state->abc_invalidation_counter; } else { incache = _in_weak_set(impl->_abc_negative_cache, subclass); @@ -830,7 +832,8 @@ static PyObject * _abc_get_cache_token_impl(PyObject *module) /*[clinic end generated code: output=c7d87841e033dacc input=70413d1c423ad9f9]*/ { - return PyLong_FromUnsignedLongLong(abc_invalidation_counter); + _abcmodule_state *state = get_abc_state(module); + return PyLong_FromUnsignedLongLong(state->abc_invalidation_counter); } static struct PyMethodDef _abcmodule_methods[] = { @@ -849,7 +852,8 @@ static int _abcmodule_exec(PyObject *module) { _abcmodule_state *state = get_abc_state(module); - state->_abc_data_type = (PyTypeObject *)PyType_FromSpec(&_abc_data_type_spec); + state->abc_invalidation_counter = 0; + state->_abc_data_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, &_abc_data_type_spec, NULL); if (state->_abc_data_type == NULL) { return -1; } From 2c3d508c5fabe40dac848fb9ae558069f0576879 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 9 May 2020 10:12:41 -0400 Subject: [PATCH 050/115] bpo-40570: Improve compatibility of uname_result with late-bound .platform (#20015) * bpo-40570: Improve compatibility of uname_result with late-bound .platform. * Add test capturing ability to cast uname to a tuple. 
--- Lib/platform.py | 7 ++++--- Lib/test/test_platform.py | 15 +++++++++++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/Lib/platform.py b/Lib/platform.py index 049c2c6ef25a1b..e9f50ab622d316 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -798,9 +798,10 @@ def __iter__(self): ) def __getitem__(self, key): - if key == 5: - return self.processor - return super().__getitem__(key) + return tuple(iter(self))[key] + + def __len__(self): + return len(tuple(iter(self))) _uname_cache = None diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index 7664b38a720a7e..a5c35dff79b8bc 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -154,11 +154,26 @@ def test_uname(self): res = platform.uname() self.assertTrue(any(res)) self.assertEqual(res[0], res.system) + self.assertEqual(res[-6], res.system) self.assertEqual(res[1], res.node) + self.assertEqual(res[-5], res.node) self.assertEqual(res[2], res.release) + self.assertEqual(res[-4], res.release) self.assertEqual(res[3], res.version) + self.assertEqual(res[-3], res.version) self.assertEqual(res[4], res.machine) + self.assertEqual(res[-2], res.machine) self.assertEqual(res[5], res.processor) + self.assertEqual(res[-1], res.processor) + self.assertEqual(len(res), 6) + + def test_uname_cast_to_tuple(self): + res = platform.uname() + expected = ( + res.system, res.node, res.release, res.version, res.machine, + res.processor, + ) + self.assertEqual(tuple(res), expected) @unittest.skipIf(sys.platform in ['win32', 'OpenVMS'], "uname -p not used") def test_uname_processor(self): From ac7a92cc0a821699df48bc2e30a02c25d6338f78 Mon Sep 17 00:00:00 2001 From: Pablo Galindo Date: Sun, 10 May 2020 05:34:50 +0100 Subject: [PATCH 051/115] bpo-40334: Avoid collisions between parser variables and grammar variables (GH-19987) This is for the C generator: - Disallow rule and variable names starting with `_` - Rename most local variable names generated by the parser to start with `_` 
Exceptions: - Renaming `p` to `_p` will be a separate PR - There are still some names that might clash, e.g. - anything starting with `Py` - C reserved words (`if` etc.) - Macros like `EXTRA` and `CHECK` --- Lib/test/test_peg_generator/test_pegen.py | 27 + Parser/pegen/parse.c | 11274 ++++++++-------- Parser/pegen/pegen.h | 2 +- Tools/peg_generator/pegen/c_generator.py | 160 +- Tools/peg_generator/pegen/parser_generator.py | 11 + 5 files changed, 5758 insertions(+), 5716 deletions(-) diff --git a/Lib/test/test_peg_generator/test_pegen.py b/Lib/test/test_peg_generator/test_pegen.py index 0a2a6d4ae16019..30e1b675643b23 100644 --- a/Lib/test/test_peg_generator/test_pegen.py +++ b/Lib/test/test_peg_generator/test_pegen.py @@ -540,6 +540,33 @@ def test_missing_start(self) -> None: with self.assertRaises(GrammarError): parser_class = make_parser(grammar) + def test_invalid_rule_name(self) -> None: + grammar = """ + start: _a b + _a: 'a' + b: 'b' + """ + with self.assertRaisesRegex(GrammarError, "cannot start with underscore: '_a'"): + parser_class = make_parser(grammar) + + def test_invalid_variable_name(self) -> None: + grammar = """ + start: a b + a: _x='a' + b: 'b' + """ + with self.assertRaisesRegex(GrammarError, "cannot start with underscore: '_x'"): + parser_class = make_parser(grammar) + + def test_invalid_variable_name_in_temporal_rule(self) -> None: + grammar = """ + start: a b + a: (_x='a' | 'b') | 'c' + b: 'b' + """ + with self.assertRaisesRegex(GrammarError, "cannot start with underscore: '_x'"): + parser_class = make_parser(grammar) + class TestGrammarVisitor: class Visitor(GrammarVisitor): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index ae86841e8663b0..27feda73d99e18 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -674,8 +674,8 @@ file_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // statements? 
$ void *a; Token * endmarker_var; @@ -685,18 +685,18 @@ file_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = _PyPegen_make_module ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_module ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // interactive: statement_newline @@ -706,26 +706,26 @@ interactive_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // statement_newline asdl_seq* a; if ( (a = statement_newline_rule(p)) // statement_newline ) { - res = Interactive ( a , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = Interactive ( a , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // eval: expressions NEWLINE* $ @@ -735,8 +735,8 @@ eval_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // expressions NEWLINE* $ asdl_seq * _loop0_1_var; expr_ty a; @@ -749,18 +749,18 @@ eval_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = Expression ( a , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = Expression ( a , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // func_type: '(' type_expressions? 
')' '->' expression NEWLINE* $ @@ -770,24 +770,24 @@ func_type_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // '(' type_expressions? ')' '->' expression NEWLINE* $ + Token * _literal; + Token * _literal_1; + Token * _literal_2; asdl_seq * _loop0_2_var; void *a; expr_ty b; Token * endmarker_var; - Token * literal; - Token * literal_1; - Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = type_expressions_rule(p), 1) // type_expressions? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 51)) // token='->' + (_literal_2 = _PyPegen_expect_token(p, 51)) // token='->' && (b = expression_rule(p)) // expression && @@ -796,18 +796,18 @@ func_type_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = FunctionType ( a , b , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = FunctionType ( a , b , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // fstring: star_expressions @@ -817,22 +817,22 @@ fstring_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // star_expressions expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // type_expressions: @@ -849,151 +849,151 @@ type_expressions_rule(Parser *p) if (p->error_indicator) { 
return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.expression+ ',' '*' expression ',' '**' expression + Token * _literal; + Token * _literal_1; + Token * _literal_2; + Token * _literal_3; asdl_seq * a; expr_ty b; expr_ty c; - Token * literal; - Token * literal_1; - Token * literal_2; - Token * literal_3; if ( (a = _gather_3_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' && (b = expression_rule(p)) // expression && - (literal_2 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_2 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_3 = _PyPegen_expect_token(p, 35)) // token='**' + (_literal_3 = _PyPegen_expect_token(p, 35)) // token='**' && (c = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.expression+ ',' '*' expression + Token * _literal; + Token * _literal_1; asdl_seq * a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = _gather_5_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' && (b = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , a , b ); + if (_res == NULL 
&& PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.expression+ ',' '**' expression + Token * _literal; + Token * _literal_1; asdl_seq * a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = _gather_7_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 35)) // token='**' + (_literal_1 = _PyPegen_expect_token(p, 35)) // token='**' && (b = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' expression ',' '**' expression + Token * _literal; + Token * _literal_1; + Token * _literal_2; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; - Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = expression_rule(p)) // expression && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_2 = _PyPegen_expect_token(p, 35)) // token='**' + (_literal_2 = _PyPegen_expect_token(p, 35)) // token='**' && (b = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + 
(_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = expression_rule(p)) // expression ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = expression_rule(p)) // expression ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.expression+ asdl_seq * _gather_9_var; @@ -1001,14 +1001,14 @@ type_expressions_rule(Parser *p) (_gather_9_var = _gather_9_rule(p)) // ','.expression+ ) { - res = _gather_9_var; + _res = _gather_9_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // statements: statement+ @@ -1018,26 +1018,26 @@ statements_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // statement+ asdl_seq * a; if ( (a = _loop1_11_rule(p)) // statement+ ) { - res = _PyPegen_seq_flatten ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_flatten ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // statement: compound_stmt | simple_stmt @@ -1047,22 +1047,22 @@ statement_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + 
int _mark = p->mark; { // compound_stmt stmt_ty a; if ( (a = compound_stmt_rule(p)) // compound_stmt ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // simple_stmt asdl_seq* simple_stmt_var; @@ -1070,14 +1070,14 @@ statement_rule(Parser *p) (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { - res = simple_stmt_var; + _res = simple_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // statement_newline: compound_stmt NEWLINE | simple_stmt | NEWLINE | $ @@ -1087,16 +1087,16 @@ statement_newline_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // compound_stmt NEWLINE stmt_ty a; Token * newline_var; @@ -1106,14 +1106,14 @@ statement_newline_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // simple_stmt asdl_seq* simple_stmt_var; @@ -1121,10 +1121,10 @@ 
statement_newline_rule(Parser *p) (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { - res = simple_stmt_var; + _res = simple_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // NEWLINE Token * newline_var; @@ -1132,22 +1132,22 @@ statement_newline_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_singleton_seq ( p , CHECK ( _Py_Pass ( EXTRA ) ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_singleton_seq ( p , CHECK ( _Py_Pass ( EXTRA ) ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // $ Token * endmarker_var; @@ -1155,18 +1155,18 @@ statement_newline_rule(Parser *p) (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = _PyPegen_interactive_exit ( p ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_interactive_exit ( p ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // simple_stmt: small_stmt !';' NEWLINE | ';'.small_stmt+ ';'? 
NEWLINE @@ -1176,8 +1176,8 @@ simple_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // small_stmt !';' NEWLINE stmt_ty a; Token * newline_var; @@ -1189,40 +1189,40 @@ simple_stmt_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ';'.small_stmt+ ';'? NEWLINE + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; Token * newline_var; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_12_rule(p)) // ';'.small_stmt+ && - (opt_var = _PyPegen_expect_token(p, 13), 1) // ';'? + (_opt_var = _PyPegen_expect_token(p, 13), 1) // ';'? && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // small_stmt: @@ -1245,28 +1245,28 @@ small_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - if (_PyPegen_is_memoized(p, small_stmt_type, &res)) - return res; - int mark = p->mark; + stmt_ty _res = NULL; + if (_PyPegen_is_memoized(p, small_stmt_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = 
p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // assignment stmt_ty assignment_var; if ( (assignment_var = assignment_rule(p)) // assignment ) { - res = assignment_var; + _res = assignment_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty e; @@ -1274,22 +1274,22 @@ small_stmt_rule(Parser *p) (e = star_expressions_rule(p)) // star_expressions ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Expr ( e , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Expr ( e , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &'return' return_stmt stmt_ty return_stmt_var; @@ -1299,10 +1299,10 @@ small_stmt_rule(Parser *p) (return_stmt_var = return_stmt_rule(p)) // return_stmt ) { - res = return_stmt_var; + _res = return_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('import' | 'from') import_stmt stmt_ty import_stmt_var; @@ -1312,10 +1312,10 @@ small_stmt_rule(Parser *p) (import_stmt_var = import_stmt_rule(p)) // import_stmt ) { - res = import_stmt_var; + _res = import_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'raise' raise_stmt stmt_ty raise_stmt_var; @@ -1325,33 +1325,33 @@ small_stmt_rule(Parser *p) (raise_stmt_var = 
raise_stmt_rule(p)) // raise_stmt ) { - res = raise_stmt_var; + _res = raise_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'pass' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 502)) // token='pass' + (_keyword = _PyPegen_expect_token(p, 502)) // token='pass' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Pass ( EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Pass ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &'del' del_stmt stmt_ty del_stmt_var; @@ -1361,10 +1361,10 @@ small_stmt_rule(Parser *p) (del_stmt_var = del_stmt_rule(p)) // del_stmt ) { - res = del_stmt_var; + _res = del_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'yield' yield_stmt stmt_ty yield_stmt_var; @@ -1374,10 +1374,10 @@ small_stmt_rule(Parser *p) (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt ) { - res = yield_stmt_var; + _res = yield_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'assert' assert_stmt stmt_ty assert_stmt_var; @@ -1387,56 +1387,56 @@ small_stmt_rule(Parser *p) (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt ) { - res = assert_stmt_var; + _res = assert_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'break' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 506)) // token='break' + 
(_keyword = _PyPegen_expect_token(p, 506)) // token='break' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Break ( EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Break ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'continue' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 507)) // token='continue' + (_keyword = _PyPegen_expect_token(p, 507)) // token='continue' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Continue ( EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Continue ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &'global' global_stmt stmt_ty global_stmt_var; @@ -1446,10 +1446,10 @@ small_stmt_rule(Parser *p) (global_stmt_var = 
global_stmt_rule(p)) // global_stmt ) { - res = global_stmt_var; + _res = global_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'nonlocal' nonlocal_stmt stmt_ty nonlocal_stmt_var; @@ -1459,15 +1459,15 @@ small_stmt_rule(Parser *p) (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt ) { - res = nonlocal_stmt_var; + _res = nonlocal_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, small_stmt_type, res); - return res; + _PyPegen_insert_memo(p, _mark, small_stmt_type, _res); + return _res; } // compound_stmt: @@ -1484,8 +1484,8 @@ compound_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // &('def' | '@' | ASYNC) function_def stmt_ty function_def_var; if ( @@ -1494,10 +1494,10 @@ compound_stmt_rule(Parser *p) (function_def_var = function_def_rule(p)) // function_def ) { - res = function_def_var; + _res = function_def_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'if' if_stmt stmt_ty if_stmt_var; @@ -1507,10 +1507,10 @@ compound_stmt_rule(Parser *p) (if_stmt_var = if_stmt_rule(p)) // if_stmt ) { - res = if_stmt_var; + _res = if_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('class' | '@') class_def stmt_ty class_def_var; @@ -1520,10 +1520,10 @@ compound_stmt_rule(Parser *p) (class_def_var = class_def_rule(p)) // class_def ) { - res = class_def_var; + _res = class_def_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('with' | ASYNC) with_stmt stmt_ty with_stmt_var; @@ -1533,10 +1533,10 @@ compound_stmt_rule(Parser *p) (with_stmt_var = with_stmt_rule(p)) // with_stmt ) { - res = with_stmt_var; + _res = with_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('for' | ASYNC) for_stmt stmt_ty for_stmt_var; @@ -1546,10 +1546,10 @@ compound_stmt_rule(Parser *p) (for_stmt_var = for_stmt_rule(p)) // 
for_stmt ) { - res = for_stmt_var; + _res = for_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'try' try_stmt stmt_ty try_stmt_var; @@ -1559,10 +1559,10 @@ compound_stmt_rule(Parser *p) (try_stmt_var = try_stmt_rule(p)) // try_stmt ) { - res = try_stmt_var; + _res = try_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'while' while_stmt stmt_ty while_stmt_var; @@ -1572,14 +1572,14 @@ compound_stmt_rule(Parser *p) (while_stmt_var = while_stmt_rule(p)) // while_stmt ) { - res = while_stmt_var; + _res = while_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // assignment: @@ -1594,79 +1594,79 @@ assignment_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':' expression ['=' annotated_rhs] + Token * _literal; expr_ty a; expr_ty b; void *c; - Token * literal; if ( (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression && (c = _tmp_19_rule(p), 1) // ['=' annotated_rhs] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - 
UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 6 , "Variable annotation syntax is" , _Py_AnnAssign ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , c , 1 , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 6 , "Variable annotation syntax is" , _Py_AnnAssign ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , c , 1 , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs] + Token * _literal; void *a; expr_ty b; void *c; - Token * literal; if ( (a = _tmp_20_rule(p)) // '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression && (c = _tmp_21_rule(p), 1) // ['=' annotated_rhs] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 6 , "Variable annotations syntax is" , _Py_AnnAssign ( a , b , c , 0 , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; 
+ UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 6 , "Variable annotations syntax is" , _Py_AnnAssign ( a , b , c , 0 , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? asdl_seq * a; @@ -1680,22 +1680,22 @@ assignment_rule(Parser *p) (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Assign ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Assign ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // target augassign (yield_expr | star_expressions) expr_ty a; @@ -1709,22 +1709,22 @@ assignment_rule(Parser *p) (c = _tmp_24_rule(p)) // yield_expr | star_expressions ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_AugAssign ( a , b -> kind , c , EXTRA ); - if (res == NULL && 
PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_AugAssign ( a , b -> kind , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_assignment void *invalid_assignment_var; @@ -1732,14 +1732,14 @@ assignment_rule(Parser *p) (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment ) { - res = invalid_assignment_var; + _res = invalid_assignment_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // augassign: @@ -1762,206 +1762,206 @@ augassign_rule(Parser *p) if (p->error_indicator) { return NULL; } - AugOperator* res = NULL; - int mark = p->mark; + AugOperator* _res = NULL; + int _mark = p->mark; { // '+=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 36)) // token='+=' + (_literal = _PyPegen_expect_token(p, 36)) // token='+=' ) { - res = _PyPegen_augoperator ( p , Add ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Add ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '-=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 37)) // token='-=' + (_literal = _PyPegen_expect_token(p, 37)) // token='-=' ) { - res = _PyPegen_augoperator ( p , Sub ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Sub ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 38)) // token='*=' + (_literal = _PyPegen_expect_token(p, 38)) // 
token='*=' ) { - res = _PyPegen_augoperator ( p , Mult ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Mult ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '@=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 50)) // token='@=' + (_literal = _PyPegen_expect_token(p, 50)) // token='@=' ) { - res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); - if (res == NULL && PyErr_Occurred()) { + _res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '/=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 39)) // token='/=' + (_literal = _PyPegen_expect_token(p, 39)) // token='/=' ) { - res = _PyPegen_augoperator ( p , Div ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Div ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '%=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 40)) // token='%=' + (_literal = _PyPegen_expect_token(p, 40)) // token='%=' ) { - res = _PyPegen_augoperator ( p , Mod ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Mod ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '&=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 41)) // token='&=' + (_literal = _PyPegen_expect_token(p, 41)) // token='&=' ) { - res = _PyPegen_augoperator ( p , BitAnd ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , BitAnd ); + if (_res == NULL && 
PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '|=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 42)) // token='|=' + (_literal = _PyPegen_expect_token(p, 42)) // token='|=' ) { - res = _PyPegen_augoperator ( p , BitOr ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , BitOr ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '^=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 43)) // token='^=' + (_literal = _PyPegen_expect_token(p, 43)) // token='^=' ) { - res = _PyPegen_augoperator ( p , BitXor ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , BitXor ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '<<=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 44)) // token='<<=' + (_literal = _PyPegen_expect_token(p, 44)) // token='<<=' ) { - res = _PyPegen_augoperator ( p , LShift ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , LShift ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '>>=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 45)) // token='>>=' + (_literal = _PyPegen_expect_token(p, 45)) // token='>>=' ) { - res = _PyPegen_augoperator ( p , RShift ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , RShift ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '**=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 46)) // token='**=' + (_literal = 
_PyPegen_expect_token(p, 46)) // token='**=' ) { - res = _PyPegen_augoperator ( p , Pow ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Pow ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '//=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 48)) // token='//=' + (_literal = _PyPegen_expect_token(p, 48)) // token='//=' ) { - res = _PyPegen_augoperator ( p , FloorDiv ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , FloorDiv ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // global_stmt: 'global' ','.NAME+ @@ -1971,45 +1971,45 @@ global_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'global' ','.NAME+ + Token * _keyword; asdl_seq * a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 508)) // token='global' + (_keyword = _PyPegen_expect_token(p, 508)) // token='global' && (a = _gather_25_rule(p)) // ','.NAME+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int 
end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Global ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Global ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // nonlocal_stmt: 'nonlocal' ','.NAME+ @@ -2019,45 +2019,45 @@ nonlocal_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'nonlocal' ','.NAME+ + Token * _keyword; asdl_seq * a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal' + (_keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal' && (a = _gather_27_rule(p)) // ','.NAME+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - 
UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Nonlocal ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Nonlocal ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // yield_stmt: yield_expr @@ -2067,42 +2067,42 @@ yield_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // yield_expr expr_ty y; if ( (y = yield_expr_rule(p)) // yield_expr ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Expr ( y , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int 
_end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Expr ( y , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // assert_stmt: 'assert' expression [',' expression] @@ -2112,48 +2112,48 @@ assert_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'assert' expression [',' expression] + Token * _keyword; expr_ty a; void *b; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 505)) // token='assert' + (_keyword = _PyPegen_expect_token(p, 505)) // token='assert' && (a = expression_rule(p)) // expression && (b = _tmp_29_rule(p), 1) // [',' expression] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Assert ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + 
UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Assert ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // del_stmt: 'del' del_targets @@ -2163,45 +2163,45 @@ del_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'del' del_targets + Token * _keyword; asdl_seq* a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 503)) // token='del' + (_keyword = _PyPegen_expect_token(p, 503)) // token='del' && (a = del_targets_rule(p)) // del_targets ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Delete ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA 
macro + _res = _Py_Delete ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_stmt: import_name | import_from @@ -2211,18 +2211,18 @@ import_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // import_name stmt_ty import_name_var; if ( (import_name_var = import_name_rule(p)) // import_name ) { - res = import_name_var; + _res = import_name_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // import_from stmt_ty import_from_var; @@ -2230,14 +2230,14 @@ import_stmt_rule(Parser *p) (import_from_var = import_from_rule(p)) // import_from ) { - res = import_from_var; + _res = import_from_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_name: 'import' dotted_as_names @@ -2247,45 +2247,45 @@ import_name_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'import' dotted_as_names + Token * _keyword; asdl_seq* a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) // token='import' + (_keyword = _PyPegen_expect_token(p, 513)) // token='import' && (a = dotted_as_names_rule(p)) // 
dotted_as_names ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Import ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Import ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from: @@ -2297,86 +2297,86 @@ import_from_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'from' (('.' | '...'))* dotted_name 'import' import_from_targets + Token * _keyword; + Token * _keyword_1; asdl_seq * a; expr_ty b; asdl_seq* c; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && (a = _loop0_30_rule(p)) // (('.' 
| '...'))* && (b = dotted_name_rule(p)) // dotted_name && - (keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && (c = import_from_targets_rule(p)) // import_from_targets ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ImportFrom ( b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ImportFrom ( b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'from' (('.' | '...'))+ 'import' import_from_targets + Token * _keyword; + Token * _keyword_1; asdl_seq * a; asdl_seq* b; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && (a = _loop1_31_rule(p)) // (('.' 
| '...'))+ && - (keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && (b = import_from_targets_rule(p)) // import_from_targets ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from_targets: '(' import_from_as_names ','? ')' | import_from_as_names | '*' @@ -2386,32 +2386,32 @@ import_from_targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // '(' import_from_as_names ','? ')' + Token * _literal; + Token * _literal_1; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq* a; - Token * literal; - Token * literal_1; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = import_from_as_names_rule(p)) // import_from_as_names && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
+ (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // import_from_as_names asdl_seq* import_from_as_names_var; @@ -2419,29 +2419,29 @@ import_from_targets_rule(Parser *p) (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names ) { - res = import_from_as_names_var; + _res = import_from_as_names_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from_as_names: ','.import_from_as_name+ @@ -2451,26 +2451,26 @@ import_from_as_names_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.import_from_as_name+ asdl_seq * a; if ( (a = _gather_32_rule(p)) // ','.import_from_as_name+ ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from_as_name: NAME ['as' NAME] @@ -2480,8 +2480,8 @@ import_from_as_name_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - alias_ty res = NULL; - int mark = p->mark; + alias_ty _res = NULL; + int _mark = p->mark; { // NAME ['as' NAME] expr_ty a; void *b; @@ -2491,18 +2491,18 @@ import_from_as_name_rule(Parser *p) (b = _tmp_34_rule(p), 1) // ['as' NAME] ) { - res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dotted_as_names: ','.dotted_as_name+ @@ -2512,26 +2512,26 @@ dotted_as_names_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.dotted_as_name+ asdl_seq * a; if ( (a = _gather_35_rule(p)) // ','.dotted_as_name+ ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dotted_as_name: dotted_name ['as' NAME] @@ -2541,8 +2541,8 @@ dotted_as_name_rule(Parser *p) if (p->error_indicator) { return NULL; } - alias_ty res = NULL; - int mark = p->mark; + alias_ty _res = NULL; + int _mark = p->mark; { // dotted_name ['as' NAME] expr_ty a; void *b; @@ -2552,18 +2552,18 @@ dotted_as_name_rule(Parser *p) (b = _tmp_37_rule(p), 1) // ['as' NAME] ) { - res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . 
id : NULL , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -2572,25 +2572,25 @@ static expr_ty dotted_name_raw(Parser *); static expr_ty dotted_name_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, dotted_name_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, dotted_name_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_0 = _PyPegen_update_memo(p, mark, dotted_name_type, res); + int tmpvar_0 = _PyPegen_update_memo(p, _mark, dotted_name_type, _res); if (tmpvar_0) { - return res; + return _res; } - p->mark = mark; - void *raw = dotted_name_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = dotted_name_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty dotted_name_raw(Parser *p) @@ -2598,28 +2598,28 @@ dotted_name_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // dotted_name '.' NAME + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = dotted_name_rule(p)) // dotted_name && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& (b = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_join_names_with_dot ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_join_names_with_dot ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // NAME expr_ty name_var; @@ -2627,14 +2627,14 @@ dotted_name_raw(Parser *p) (name_var = _PyPegen_name_token(p)) // NAME ) { - res = name_var; + _res = name_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // if_stmt: @@ -2646,89 +2646,89 @@ if_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'if' named_expression ':' block elif_stmt + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; stmt_ty c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = elif_stmt_rule(p)) // elif_stmt ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token 
== NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'if' named_expression ':' block else_block? + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; void *c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = else_block_rule(p), 1) // else_block? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // elif_stmt: @@ -2740,89 +2740,89 @@ elif_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'elif' named_expression ':' block elif_stmt + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; stmt_ty c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' 
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = elif_stmt_rule(p)) // elif_stmt ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'elif' named_expression ':' block else_block? + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; void *c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = else_block_rule(p), 1) // else_block? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // else_block: 'else' ':' block @@ -2832,32 +2832,32 @@ else_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // 'else' ':' block + Token * _keyword; + Token * _literal; asdl_seq* b; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 516)) // token='else' + (_keyword = _PyPegen_expect_token(p, 516)) // token='else' && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - res = b; - if (res == NULL && PyErr_Occurred()) { + _res = b; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // while_stmt: 'while' named_expression ':' block else_block? 
@@ -2867,54 +2867,54 @@ while_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'while' named_expression ':' block else_block? + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; void *c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 512)) // token='while' + (_keyword = _PyPegen_expect_token(p, 512)) // token='while' && (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (c = else_block_rule(p), 1) // else_block? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_While ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_While ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // for_stmt: @@ -2926,35 +2926,35 @@ for_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? 
+ Token * _keyword; + Token * _keyword_1; + Token * _literal; asdl_seq* b; void *el; expr_ty ex; - Token * keyword; - Token * keyword_1; - Token * literal; expr_ty t; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && @@ -2963,45 +2963,45 @@ for_stmt_rule(Parser *p) (el = else_block_rule(p), 1) // else_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_For ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_For ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? 
+ Token * _keyword; + Token * _keyword_1; + Token * _literal; Token * async_var; asdl_seq* b; void *el; expr_ty ex; - Token * keyword; - Token * keyword_1; - Token * literal; expr_ty t; void *tc; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && @@ -3010,26 +3010,26 @@ for_stmt_rule(Parser *p) (el = else_block_rule(p), 1) // else_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async for loops are" , _Py_AsyncFor ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async for loops are" , _Py_AsyncFor ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // with_stmt: @@ -3043,179 +3043,179 @@ 
with_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'with' '(' ','.with_item+ ','? ')' ':' block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = _gather_38_rule(p)) // ','.with_item+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_With ( a , b , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_With ( a , b , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'with' ','.with_item+ ':' TYPE_COMMENT? block + Token * _keyword; + Token * _literal; asdl_seq * a; asdl_seq* b; - Token * keyword; - Token * literal; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && (a = _gather_40_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
&& (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_With ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_With ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; Token * async_var; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = _gather_42_rule(p)) // ','.with_item+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NULL , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NULL , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block + Token * _keyword; + Token * _literal; asdl_seq * a; Token * async_var; asdl_seq* b; - Token * keyword; - Token * literal; void *tc; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && (a = _gather_44_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
&& (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // with_item: expression ['as' target] @@ -3225,8 +3225,8 @@ with_item_rule(Parser *p) if (p->error_indicator) { return NULL; } - withitem_ty res = NULL; - int mark = p->mark; + withitem_ty _res = NULL; + int _mark = p->mark; { // expression ['as' target] expr_ty e; void *o; @@ -3236,18 +3236,18 @@ with_item_rule(Parser *p) (o = _tmp_46_rule(p), 1) // ['as' target] ) { - res = _Py_withitem ( e , o , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_withitem ( e , o , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // try_stmt: @@ -3259,59 +3259,59 @@ try_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == 
p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'try' ':' block finally_block + Token * _keyword; + Token * _literal; asdl_seq* b; asdl_seq* f; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) // token='try' + (_keyword = _PyPegen_expect_token(p, 511)) // token='try' && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && (f = finally_block_rule(p)) // finally_block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Try ( b , NULL , NULL , f , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Try ( b , NULL , NULL , f , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'try' ':' block except_block+ else_block? finally_block? 
+ Token * _keyword; + Token * _literal; asdl_seq* b; void *el; asdl_seq * ex; void *f; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) // token='try' + (_keyword = _PyPegen_expect_token(p, 511)) // token='try' && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block && @@ -3322,26 +3322,26 @@ try_stmt_rule(Parser *p) (f = finally_block_rule(p), 1) // finally_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Try ( b , ex , el , f , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Try ( b , ex , el , f , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // except_block: 'except' expression ['as' target] ':' block | 'except' ':' block @@ -3351,83 +3351,83 @@ except_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - excepthandler_ty res = NULL; - int mark = p->mark; + excepthandler_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int 
_start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'except' expression ['as' target] ':' block + Token * _keyword; + Token * _literal; asdl_seq* b; expr_ty e; - Token * keyword; - Token * literal; void *t; if ( - (keyword = _PyPegen_expect_token(p, 520)) // token='except' + (_keyword = _PyPegen_expect_token(p, 520)) // token='except' && (e = expression_rule(p)) // expression && (t = _tmp_48_rule(p), 1) // ['as' target] && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . id : NULL , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . 
id : NULL , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'except' ':' block + Token * _keyword; + Token * _literal; asdl_seq* b; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 520)) // token='except' + (_keyword = _PyPegen_expect_token(p, 520)) // token='except' && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ExceptHandler ( NULL , NULL , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ExceptHandler ( NULL , NULL , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // finally_block: 'finally' ':' block @@ -3437,32 +3437,32 @@ finally_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // 'finally' ':' block + Token * _keyword; + Token * _literal; asdl_seq* a; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 521)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 521)) // token='finally' && - (literal = _PyPegen_expect_token(p, 11)) // 
token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (a = block_rule(p)) // block ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // return_stmt: 'return' star_expressions? @@ -3472,45 +3472,45 @@ return_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'return' star_expressions? + Token * _keyword; void *a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 500)) // token='return' + (_keyword = _PyPegen_expect_token(p, 500)) // token='return' && (a = star_expressions_rule(p), 1) // star_expressions? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Return ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Return ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // raise_stmt: 'raise' expression ['from' expression] | 'raise' @@ -3520,71 +3520,71 @@ raise_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'raise' expression ['from' expression] + Token * _keyword; expr_ty a; void *b; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' && (a = expression_rule(p)) // expression && (b = _tmp_49_rule(p), 1) // ['from' expression] ) { - Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Raise ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Raise ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'raise' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Raise ( NULL , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Raise ( NULL , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // function_def: decorators function_def_raw | function_def_raw @@ -3594,8 +3594,8 @@ function_def_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // decorators function_def_raw asdl_seq* d; stmt_ty f; @@ -3605,14 +3605,14 @@ function_def_rule(Parser *p) (f = function_def_raw_rule(p)) // function_def_raw ) { - res = _PyPegen_function_def_decorators ( p , d , f ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_function_def_decorators ( p , d , f ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // function_def_raw stmt_ty function_def_raw_var; @@ -3620,14 +3620,14 @@ function_def_rule(Parser *p) (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw ) { - res = function_def_raw_var; + _res = function_def_raw_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // function_def_raw: @@ -3639,116 +3639,116 @@ function_def_raw_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? 
block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; void *a; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; expr_ty n; void *params; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 522)) // token='def' + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (params = params_rule(p), 1) // params? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && (a = _tmp_50_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && (tc = func_type_comment_rule(p), 1) // func_type_comment? && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_FunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_FunctionDef ( n -> v . Name . id , ( params ) ? 
params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; void *a; Token * async_var; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; expr_ty n; void *params; void *tc; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 522)) // token='def' + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (params = params_rule(p), 1) // params? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && (a = _tmp_51_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && (tc = func_type_comment_rule(p), 1) // func_type_comment? && (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async functions are" , _Py_AsyncFunctionDef ( n -> v . Name . id , ( params ) ? 
params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async functions are" , _Py_AsyncFunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // func_type_comment: @@ -3761,8 +3761,8 @@ func_type_comment_rule(Parser *p) if (p->error_indicator) { return NULL; } - Token* res = NULL; - int mark = p->mark; + Token* _res = NULL; + int _mark = p->mark; { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) Token * newline_var; Token * t; @@ -3774,14 +3774,14 @@ func_type_comment_rule(Parser *p) _PyPegen_lookahead(1, _tmp_52_rule, p) ) { - res = t; - if (res == NULL && PyErr_Occurred()) { + _res = t; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_double_type_comments void *invalid_double_type_comments_var; @@ -3789,10 +3789,10 @@ func_type_comment_rule(Parser *p) (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments ) { - res = invalid_double_type_comments_var; + _res = invalid_double_type_comments_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // TYPE_COMMENT Token * type_comment_var; @@ -3800,14 +3800,14 @@ func_type_comment_rule(Parser *p) (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' ) { - res = type_comment_var; + _res = type_comment_var; goto done; } - p->mark = mark; + 
p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // params: invalid_parameters | parameters @@ -3817,18 +3817,18 @@ params_rule(Parser *p) if (p->error_indicator) { return NULL; } - arguments_ty res = NULL; - int mark = p->mark; + arguments_ty _res = NULL; + int _mark = p->mark; { // invalid_parameters void *invalid_parameters_var; if ( (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters ) { - res = invalid_parameters_var; + _res = invalid_parameters_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // parameters arguments_ty parameters_var; @@ -3836,14 +3836,14 @@ params_rule(Parser *p) (parameters_var = parameters_rule(p)) // parameters ) { - res = parameters_var; + _res = parameters_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // parameters: @@ -3858,8 +3858,8 @@ parameters_rule(Parser *p) if (p->error_indicator) { return NULL; } - arguments_ty res = NULL; - int mark = p->mark; + arguments_ty _res = NULL; + int _mark = p->mark; { // slash_no_default param_no_default* param_with_default* star_etc? asdl_seq* a; asdl_seq * b; @@ -3875,14 +3875,14 @@ parameters_rule(Parser *p) (d = star_etc_rule(p), 1) // star_etc? ) { - res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // slash_with_default param_with_default* star_etc? SlashWithDefault* a; @@ -3896,14 +3896,14 @@ parameters_rule(Parser *p) (c = star_etc_rule(p), 1) // star_etc? 
) { - res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_no_default+ param_with_default* star_etc? asdl_seq * a; @@ -3917,14 +3917,14 @@ parameters_rule(Parser *p) (c = star_etc_rule(p), 1) // star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_with_default+ star_etc? asdl_seq * a; @@ -3935,14 +3935,14 @@ parameters_rule(Parser *p) (b = star_etc_rule(p), 1) // star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_etc StarEtc* a; @@ -3950,18 +3950,18 @@ parameters_rule(Parser *p) (a = star_etc_rule(p)) // star_etc ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slash_no_default: param_no_default+ '/' ',' | param_no_default+ '/' &')' @@ -3971,52 +3971,52 @@ slash_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { 
// param_no_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; - Token * literal; - Token * literal_1; if ( (a = _loop1_59_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_no_default+ '/' &')' + Token * _literal; asdl_seq * a; - Token * literal; if ( (a = _loop1_60_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slash_with_default: @@ -4028,58 +4028,58 @@ slash_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - SlashWithDefault* res = NULL; - int mark = p->mark; + SlashWithDefault* _res = NULL; + int _mark = p->mark; { // param_no_default* param_with_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; asdl_seq * b; - Token * literal; - Token * literal_1; if ( (a = _loop0_61_rule(p)) // param_no_default* && (b = _loop1_62_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if 
(res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_no_default* param_with_default+ '/' &')' + Token * _literal; asdl_seq * a; asdl_seq * b; - Token * literal; if ( (a = _loop0_63_rule(p)) // param_no_default* && (b = _loop1_64_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_etc: @@ -4093,15 +4093,15 @@ star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - StarEtc* res = NULL; - int mark = p->mark; + StarEtc* _res = NULL; + int _mark = p->mark; { // '*' param_no_default param_maybe_default* kwds? + Token * _literal; arg_ty a; asdl_seq * b; void *c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = param_no_default_rule(p)) // param_no_default && @@ -4110,38 +4110,38 @@ star_etc_rule(Parser *p) (c = kwds_rule(p), 1) // kwds? ) { - res = _PyPegen_star_etc ( p , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' ',' param_maybe_default+ kwds? 
+ Token * _literal; + Token * _literal_1; asdl_seq * b; void *c; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && (b = _loop1_66_rule(p)) // param_maybe_default+ && (c = kwds_rule(p), 1) // kwds? ) { - res = _PyPegen_star_etc ( p , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // kwds arg_ty a; @@ -4149,14 +4149,14 @@ star_etc_rule(Parser *p) (a = kwds_rule(p)) // kwds ) { - res = _PyPegen_star_etc ( p , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_star_etc void *invalid_star_etc_var; @@ -4164,14 +4164,14 @@ star_etc_rule(Parser *p) (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc ) { - res = invalid_star_etc_var; + _res = invalid_star_etc_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwds: '**' param_no_default @@ -4181,29 +4181,29 @@ kwds_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // '**' param_no_default + Token * _literal; arg_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = param_no_default_rule(p)) // param_no_default ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { 
p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param_no_default: param ',' TYPE_COMMENT? | param TYPE_COMMENT? &')' @@ -4213,28 +4213,28 @@ param_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // param ',' TYPE_COMMENT? + Token * _literal; arg_ty a; - Token * literal; void *tc; if ( (a = param_rule(p)) // param && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param TYPE_COMMENT? &')' arg_ty a; @@ -4247,18 +4247,18 @@ param_no_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param_with_default: param default ',' TYPE_COMMENT? | param default TYPE_COMMENT? &')' @@ -4268,31 +4268,31 @@ param_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // param default ',' TYPE_COMMENT? 
+ Token * _literal; arg_ty a; expr_ty c; - Token * literal; void *tc; if ( (a = param_rule(p)) // param && (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param default TYPE_COMMENT? &')' arg_ty a; @@ -4308,18 +4308,18 @@ param_with_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param_maybe_default: @@ -4331,31 +4331,31 @@ param_maybe_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // param default? ',' TYPE_COMMENT? + Token * _literal; arg_ty a; void *c; - Token * literal; void *tc; if ( (a = param_rule(p)) // param && (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param default? TYPE_COMMENT? &')' arg_ty a; @@ -4371,18 +4371,18 @@ param_maybe_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param: NAME annotation? @@ -4392,16 +4392,16 @@ param_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME annotation? expr_ty a; void *b; @@ -4411,26 +4411,26 @@ param_rule(Parser *p) (b = annotation_rule(p), 1) // annotation? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_arg ( a -> v . Name . id , b , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_arg ( a -> v . Name . id , b , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // annotation: ':' expression @@ -4440,29 +4440,29 @@ annotation_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // ':' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (a = expression_rule(p)) // expression ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // default: '=' expression @@ -4472,29 +4472,29 @@ default_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // '=' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = 
_PyPegen_expect_token(p, 22)) // token='=' && (a = expression_rule(p)) // expression ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // decorators: (('@' named_expression NEWLINE))+ @@ -4504,26 +4504,26 @@ decorators_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // (('@' named_expression NEWLINE))+ asdl_seq * a; if ( (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // class_def: decorators class_def_raw | class_def_raw @@ -4533,8 +4533,8 @@ class_def_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // decorators class_def_raw asdl_seq* a; stmt_ty b; @@ -4544,14 +4544,14 @@ class_def_rule(Parser *p) (b = class_def_raw_rule(p)) // class_def_raw ) { - res = _PyPegen_class_def_decorators ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_class_def_decorators ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // class_def_raw stmt_ty class_def_raw_var; @@ -4559,14 +4559,14 @@ class_def_rule(Parser *p) (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw ) { - res = class_def_raw_var; + _res = class_def_raw_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // class_def_raw: 
'class' NAME ['(' arguments? ')'] ':' block @@ -4576,54 +4576,54 @@ class_def_raw_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'class' NAME ['(' arguments? ')'] ':' block + Token * _keyword; + Token * _literal; expr_ty a; void *b; asdl_seq* c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 523)) // token='class' + (_keyword = _PyPegen_expect_token(p, 523)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && (b = _tmp_68_rule(p), 1) // ['(' arguments? ')'] && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (c = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ClassDef ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , c , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ClassDef ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , c , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // block: NEWLINE INDENT statements DEDENT | simple_stmt | invalid_block @@ -4633,10 +4633,10 @@ block_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - if (_PyPegen_is_memoized(p, block_type, &res)) - return res; - int mark = p->mark; + asdl_seq* _res = NULL; + if (_PyPegen_is_memoized(p, block_type, &_res)) + return _res; + int _mark = p->mark; { // NEWLINE INDENT statements DEDENT asdl_seq* a; Token * dedent_var; @@ -4652,14 +4652,14 @@ block_rule(Parser *p) (dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // simple_stmt asdl_seq* simple_stmt_var; @@ -4667,10 +4667,10 @@ block_rule(Parser *p) (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { - res = simple_stmt_var; + _res = simple_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_block void *invalid_block_var; @@ -4678,15 +4678,15 @@ block_rule(Parser *p) (invalid_block_var = invalid_block_rule(p)) // invalid_block ) { - res = invalid_block_var; + _res = invalid_block_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, 
block_type, res); - return res; + _PyPegen_insert_memo(p, _mark, block_type, _res); + return _res; } // expressions_list: ','.star_expression+ ','? @@ -4696,30 +4696,30 @@ expressions_list_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.star_expression+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_69_rule(p)) // ','.star_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_expressions: @@ -4732,71 +4732,71 @@ star_expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_expression ((',' star_expression))+ ','? 
+ void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; asdl_seq * b; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = star_expression_rule(p)) // star_expression && (b = _loop1_71_rule(p)) // ((',' star_expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expression ',' + Token * _literal; expr_ty a; - Token * literal; if ( (a = star_expression_rule(p)) // star_expression && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , 
EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expression expr_ty star_expression_var; @@ -4804,14 +4804,14 @@ star_expressions_rule(Parser *p) (star_expression_var = star_expression_rule(p)) // star_expression ) { - res = star_expression_var; + _res = star_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_expression: '*' bitwise_or | expression @@ -4821,43 +4821,43 @@ star_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, star_expression_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, star_expression_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = bitwise_or_rule(p)) // bitwise_or ) { - Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression expr_ty expression_var; @@ -4865,15 +4865,15 @@ star_expression_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - res = expression_var; + _res = expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, star_expression_type, res); - return res; + _PyPegen_insert_memo(p, _mark, star_expression_type, _res); + return _res; } // star_named_expressions: ','.star_named_expression+ ','? @@ -4883,30 +4883,30 @@ star_named_expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.star_named_expression+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_72_rule(p)) // ','.star_named_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_named_expression: '*' bitwise_or | named_expression @@ -4916,41 +4916,41 @@ star_named_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = bitwise_or_rule(p)) // bitwise_or ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && 
PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // named_expression expr_ty named_expression_var; @@ -4958,14 +4958,14 @@ star_named_expression_rule(Parser *p) (named_expression_var = named_expression_rule(p)) // named_expression ) { - res = named_expression_var; + _res = named_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // named_expression: NAME ':=' expression | expression !':=' | invalid_named_expression @@ -4975,44 +4975,44 @@ named_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':=' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 53)) // token=':=' + (_literal = _PyPegen_expect_token(p, 53)) // token=':=' && (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_NamedExpr ( CHECK ( _PyPegen_set_expr_context ( p , 
a , Store ) ) , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_NamedExpr ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression !':=' expr_ty expression_var; @@ -5022,10 +5022,10 @@ named_expression_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - res = expression_var; + _res = expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_named_expression void *invalid_named_expression_var; @@ -5033,14 +5033,14 @@ named_expression_rule(Parser *p) (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression ) { - res = invalid_named_expression_var; + _res = invalid_named_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // annotated_rhs: yield_expr | star_expressions @@ -5050,18 +5050,18 @@ annotated_rhs_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; @@ -5069,14 +5069,14 @@ annotated_rhs_rule(Parser *p) (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // expressions: 
expression ((',' expression))+ ','? | expression ',' | expression @@ -5086,71 +5086,71 @@ expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression ((',' expression))+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; asdl_seq * b; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = expression_rule(p)) // expression && (b = _loop1_74_rule(p)) // ((',' expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ',' + Token * _literal; expr_ty a; - Token * literal; if ( (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; 
return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression expr_ty expression_var; @@ -5158,14 +5158,14 @@ expressions_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - res = expression_var; + _res = expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // expression: disjunction 'if' disjunction 'else' expression | disjunction | lambdef @@ -5175,52 +5175,52 @@ expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, expression_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, expression_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // disjunction 'if' disjunction 'else' expression + Token * _keyword; + Token * _keyword_1; expr_ty a; expr_ty b; expr_ty c; - Token * keyword; - Token * keyword_1; if ( (a = disjunction_rule(p)) // disjunction && - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (keyword_1 = _PyPegen_expect_token(p, 516)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 516)) // token='else' && (c = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); 
+ if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_IfExp ( b , a , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_IfExp ( b , a , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // disjunction expr_ty disjunction_var; @@ -5228,10 +5228,10 @@ expression_rule(Parser *p) (disjunction_var = disjunction_rule(p)) // disjunction ) { - res = disjunction_var; + _res = disjunction_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // lambdef expr_ty lambdef_var; @@ -5239,15 +5239,15 @@ expression_rule(Parser *p) (lambdef_var = lambdef_rule(p)) // lambdef ) { - res = lambdef_var; + _res = lambdef_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, expression_type, res); - return res; + _PyPegen_insert_memo(p, _mark, expression_type, _res); + return _res; } // lambdef: 'lambda' lambda_parameters? 
':' expression @@ -5257,51 +5257,51 @@ lambdef_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'lambda' lambda_parameters? ':' expression + Token * _keyword; + Token * _literal; void *a; expr_ty b; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 524)) // token='lambda' + (_keyword = _PyPegen_expect_token(p, 524)) // token='lambda' && (a = lambda_parameters_rule(p), 1) // lambda_parameters? && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Lambda ( ( a ) ? a : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Lambda ( ( a ) ? 
a : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_parameters: @@ -5316,8 +5316,8 @@ lambda_parameters_rule(Parser *p) if (p->error_indicator) { return NULL; } - arguments_ty res = NULL; - int mark = p->mark; + arguments_ty _res = NULL; + int _mark = p->mark; { // lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? asdl_seq* a; asdl_seq * b; @@ -5333,14 +5333,14 @@ lambda_parameters_rule(Parser *p) (d = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { - res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? SlashWithDefault* a; @@ -5354,14 +5354,14 @@ lambda_parameters_rule(Parser *p) (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? asdl_seq * a; @@ -5375,14 +5375,14 @@ lambda_parameters_rule(Parser *p) (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
) { - res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_with_default+ lambda_star_etc? asdl_seq * a; @@ -5393,14 +5393,14 @@ lambda_parameters_rule(Parser *p) (b = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_star_etc StarEtc* a; @@ -5408,18 +5408,18 @@ lambda_parameters_rule(Parser *p) (a = lambda_star_etc_rule(p)) // lambda_star_etc ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_slash_no_default: @@ -5431,52 +5431,52 @@ lambda_slash_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // lambda_param_no_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; - Token * literal; - Token * literal_1; if ( (a = _loop1_81_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = a; - if 
(res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_no_default+ '/' &':' + Token * _literal; asdl_seq * a; - Token * literal; if ( (a = _loop1_82_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_slash_with_default: @@ -5488,58 +5488,58 @@ lambda_slash_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - SlashWithDefault* res = NULL; - int mark = p->mark; + SlashWithDefault* _res = NULL; + int _mark = p->mark; { // lambda_param_no_default* lambda_param_with_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; asdl_seq * b; - Token * literal; - Token * literal_1; if ( (a = _loop0_83_rule(p)) // lambda_param_no_default* && (b = _loop1_84_rule(p)) // lambda_param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_no_default* lambda_param_with_default+ '/' &':' + Token * _literal; asdl_seq * a; asdl_seq * b; - Token * literal; if ( (a = 
_loop0_85_rule(p)) // lambda_param_no_default* && (b = _loop1_86_rule(p)) // lambda_param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_star_etc: @@ -5553,15 +5553,15 @@ lambda_star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - StarEtc* res = NULL; - int mark = p->mark; + StarEtc* _res = NULL; + int _mark = p->mark; { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? + Token * _literal; arg_ty a; asdl_seq * b; void *c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = lambda_param_no_default_rule(p)) // lambda_param_no_default && @@ -5570,38 +5570,38 @@ lambda_star_etc_rule(Parser *p) (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) { - res = _PyPegen_star_etc ( p , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' ',' lambda_param_maybe_default+ lambda_kwds? 
+ Token * _literal; + Token * _literal_1; asdl_seq * b; void *c; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && (b = _loop1_88_rule(p)) // lambda_param_maybe_default+ && (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) { - res = _PyPegen_star_etc ( p , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_kwds arg_ty a; @@ -5609,14 +5609,14 @@ lambda_star_etc_rule(Parser *p) (a = lambda_kwds_rule(p)) // lambda_kwds ) { - res = _PyPegen_star_etc ( p , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_lambda_star_etc void *invalid_lambda_star_etc_var; @@ -5624,14 +5624,14 @@ lambda_star_etc_rule(Parser *p) (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc ) { - res = invalid_lambda_star_etc_var; + _res = invalid_lambda_star_etc_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_kwds: '**' lambda_param_no_default @@ -5641,29 +5641,29 @@ lambda_kwds_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // '**' lambda_param_no_default + Token * _literal; arg_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = 
lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param_no_default: lambda_param ',' | lambda_param &':' @@ -5673,25 +5673,25 @@ lambda_param_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // lambda_param ',' + Token * _literal; arg_ty a; - Token * literal; if ( (a = lambda_param_rule(p)) // lambda_param && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param &':' arg_ty a; @@ -5701,18 +5701,18 @@ lambda_param_no_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param_with_default: lambda_param default ',' | lambda_param default &':' @@ -5722,28 +5722,28 @@ lambda_param_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // lambda_param default ',' + Token * _literal; arg_ty a; expr_ty c; - Token * literal; if ( (a = lambda_param_rule(p)) // lambda_param && (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + 
(_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param default &':' arg_ty a; @@ -5756,18 +5756,18 @@ lambda_param_with_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param_maybe_default: lambda_param default? ',' | lambda_param default? &':' @@ -5777,28 +5777,28 @@ lambda_param_maybe_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // lambda_param default? ',' + Token * _literal; arg_ty a; void *c; - Token * literal; if ( (a = lambda_param_rule(p)) // lambda_param && (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param default? 
&':' arg_ty a; @@ -5811,18 +5811,18 @@ lambda_param_maybe_default_rule(Parser *p) _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param: NAME @@ -5832,42 +5832,42 @@ lambda_param_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_arg ( a -> v . Name . id , NULL , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_arg ( a -> v . 
Name . id , NULL , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // disjunction: conjunction (('or' conjunction))+ | conjunction @@ -5877,18 +5877,18 @@ disjunction_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, disjunction_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, disjunction_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // conjunction (('or' conjunction))+ expr_ty a; asdl_seq * b; @@ -5898,22 +5898,22 @@ disjunction_rule(Parser *p) (b = _loop1_89_rule(p)) // (('or' conjunction))+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BoolOp ( Or , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA 
macro + _res = _Py_BoolOp ( Or , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // conjunction expr_ty conjunction_var; @@ -5921,15 +5921,15 @@ disjunction_rule(Parser *p) (conjunction_var = conjunction_rule(p)) // conjunction ) { - res = conjunction_var; + _res = conjunction_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, disjunction_type, res); - return res; + _PyPegen_insert_memo(p, _mark, disjunction_type, _res); + return _res; } // conjunction: inversion (('and' inversion))+ | inversion @@ -5939,18 +5939,18 @@ conjunction_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, conjunction_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, conjunction_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // inversion (('and' inversion))+ expr_ty a; asdl_seq * b; @@ -5960,22 +5960,22 @@ conjunction_rule(Parser *p) (b = _loop1_90_rule(p)) // (('and' inversion))+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA 
macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BoolOp ( And , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BoolOp ( And , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // inversion expr_ty inversion_var; @@ -5983,15 +5983,15 @@ conjunction_rule(Parser *p) (inversion_var = inversion_rule(p)) // inversion ) { - res = inversion_var; + _res = inversion_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, conjunction_type, res); - return res; + _PyPegen_insert_memo(p, _mark, conjunction_type, _res); + return _res; } // inversion: 'not' inversion | comparison @@ -6001,43 +6001,43 @@ inversion_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, inversion_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, inversion_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'not' inversion + Token * _keyword; 
expr_ty a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 525)) // token='not' + (_keyword = _PyPegen_expect_token(p, 525)) // token='not' && (a = inversion_rule(p)) // inversion ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( Not , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( Not , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // comparison expr_ty comparison_var; @@ -6045,15 +6045,15 @@ inversion_rule(Parser *p) (comparison_var = comparison_rule(p)) // comparison ) { - res = comparison_var; + _res = comparison_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, inversion_type, res); - return res; + _PyPegen_insert_memo(p, _mark, inversion_type, _res); + return _res; } // comparison: bitwise_or compare_op_bitwise_or_pair+ | bitwise_or @@ -6063,16 +6063,16 @@ comparison_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + 
int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or compare_op_bitwise_or_pair+ expr_ty a; asdl_seq * b; @@ -6082,22 +6082,22 @@ comparison_rule(Parser *p) (b = _loop1_91_rule(p)) // compare_op_bitwise_or_pair+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Compare ( a , CHECK ( _PyPegen_get_cmpops ( p , b ) ) , CHECK ( _PyPegen_get_exprs ( p , b ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Compare ( a , CHECK ( _PyPegen_get_cmpops ( p , b ) ) , CHECK ( _PyPegen_get_exprs ( p , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // bitwise_or expr_ty bitwise_or_var; @@ -6105,14 +6105,14 @@ comparison_rule(Parser *p) (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or ) { - res = bitwise_or_var; + _res = bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // compare_op_bitwise_or_pair: @@ -6132,18 +6132,18 @@ compare_op_bitwise_or_pair_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // eq_bitwise_or CmpopExprPair* eq_bitwise_or_var; if ( 
(eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or ) { - res = eq_bitwise_or_var; + _res = eq_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // noteq_bitwise_or CmpopExprPair* noteq_bitwise_or_var; @@ -6151,10 +6151,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or ) { - res = noteq_bitwise_or_var; + _res = noteq_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // lte_bitwise_or CmpopExprPair* lte_bitwise_or_var; @@ -6162,10 +6162,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or ) { - res = lte_bitwise_or_var; + _res = lte_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // lt_bitwise_or CmpopExprPair* lt_bitwise_or_var; @@ -6173,10 +6173,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or ) { - res = lt_bitwise_or_var; + _res = lt_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // gte_bitwise_or CmpopExprPair* gte_bitwise_or_var; @@ -6184,10 +6184,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or ) { - res = gte_bitwise_or_var; + _res = gte_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // gt_bitwise_or CmpopExprPair* gt_bitwise_or_var; @@ -6195,10 +6195,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or ) { - res = gt_bitwise_or_var; + _res = gt_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // notin_bitwise_or CmpopExprPair* notin_bitwise_or_var; @@ -6206,10 +6206,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or ) { - res = notin_bitwise_or_var; + _res = notin_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // in_bitwise_or 
CmpopExprPair* in_bitwise_or_var; @@ -6217,10 +6217,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or ) { - res = in_bitwise_or_var; + _res = in_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // isnot_bitwise_or CmpopExprPair* isnot_bitwise_or_var; @@ -6228,10 +6228,10 @@ compare_op_bitwise_or_pair_rule(Parser *p) (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or ) { - res = isnot_bitwise_or_var; + _res = isnot_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // is_bitwise_or CmpopExprPair* is_bitwise_or_var; @@ -6239,14 +6239,14 @@ compare_op_bitwise_or_pair_rule(Parser *p) (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or ) { - res = is_bitwise_or_var; + _res = is_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // eq_bitwise_or: '==' bitwise_or @@ -6256,29 +6256,29 @@ eq_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '==' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 27)) // token='==' + (_literal = _PyPegen_expect_token(p, 27)) // token='==' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // noteq_bitwise_or: ('!=') bitwise_or @@ -6288,8 +6288,8 @@ noteq_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 
('!=') bitwise_or void *_tmp_92_var; expr_ty a; @@ -6299,18 +6299,18 @@ noteq_bitwise_or_rule(Parser *p) (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lte_bitwise_or: '<=' bitwise_or @@ -6320,29 +6320,29 @@ lte_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '<=' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 29)) // token='<=' + (_literal = _PyPegen_expect_token(p, 29)) // token='<=' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lt_bitwise_or: '<' bitwise_or @@ -6352,29 +6352,29 @@ lt_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '<' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 20)) // token='<' + (_literal = _PyPegen_expect_token(p, 20)) // token='<' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto 
done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // gte_bitwise_or: '>=' bitwise_or @@ -6384,29 +6384,29 @@ gte_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '>=' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 30)) // token='>=' + (_literal = _PyPegen_expect_token(p, 30)) // token='>=' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // gt_bitwise_or: '>' bitwise_or @@ -6416,29 +6416,29 @@ gt_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '>' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 21)) // token='>' + (_literal = _PyPegen_expect_token(p, 21)) // token='>' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // notin_bitwise_or: 'not' 'in' bitwise_or @@ -6448,32 +6448,32 @@ notin_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'not' 'in' 
bitwise_or + Token * _keyword; + Token * _keyword_1; expr_ty a; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 525)) // token='not' + (_keyword = _PyPegen_expect_token(p, 525)) // token='not' && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // in_bitwise_or: 'in' bitwise_or @@ -6483,29 +6483,29 @@ in_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'in' bitwise_or + Token * _keyword; expr_ty a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword = _PyPegen_expect_token(p, 518)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , In , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , In , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // isnot_bitwise_or: 'is' 'not' bitwise_or @@ -6515,32 +6515,32 @@ isnot_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'is' 'not' bitwise_or + Token * _keyword; + Token * _keyword_1; expr_ty a; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 526)) // token='is' + (_keyword = 
_PyPegen_expect_token(p, 526)) // token='is' && - (keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' + (_keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // is_bitwise_or: 'is' bitwise_or @@ -6550,29 +6550,29 @@ is_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'is' bitwise_or + Token * _keyword; expr_ty a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 526)) // token='is' + (_keyword = _PyPegen_expect_token(p, 526)) // token='is' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Is , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Is , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6581,25 +6581,25 @@ static expr_ty bitwise_or_raw(Parser *); static expr_ty bitwise_or_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, bitwise_or_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_or_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_1 = _PyPegen_update_memo(p, mark, bitwise_or_type, res); + int tmpvar_1 = _PyPegen_update_memo(p, _mark, bitwise_or_type, _res); if (tmpvar_1) { - return res; + return _res; } - p->mark = mark; - 
void *raw = bitwise_or_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = bitwise_or_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty bitwise_or_raw(Parser *p) @@ -6607,44 +6607,44 @@ bitwise_or_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or '|' bitwise_xor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = bitwise_or_rule(p)) // bitwise_or && - (literal = _PyPegen_expect_token(p, 18)) // token='|' + (_literal = _PyPegen_expect_token(p, 18)) // token='|' && (b = bitwise_xor_rule(p)) // bitwise_xor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , BitOr , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used 
by EXTRA macro + _res = _Py_BinOp ( a , BitOr , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // bitwise_xor expr_ty bitwise_xor_var; @@ -6652,14 +6652,14 @@ bitwise_or_raw(Parser *p) (bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor ) { - res = bitwise_xor_var; + _res = bitwise_xor_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6668,25 +6668,25 @@ static expr_ty bitwise_xor_raw(Parser *); static expr_ty bitwise_xor_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, bitwise_xor_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_xor_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_2 = _PyPegen_update_memo(p, mark, bitwise_xor_type, res); + int tmpvar_2 = _PyPegen_update_memo(p, _mark, bitwise_xor_type, _res); if (tmpvar_2) { - return res; + return _res; } - p->mark = mark; - void *raw = bitwise_xor_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = bitwise_xor_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty bitwise_xor_raw(Parser *p) @@ -6694,44 +6694,44 @@ bitwise_xor_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + 
int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_xor '^' bitwise_and + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = bitwise_xor_rule(p)) // bitwise_xor && - (literal = _PyPegen_expect_token(p, 32)) // token='^' + (_literal = _PyPegen_expect_token(p, 32)) // token='^' && (b = bitwise_and_rule(p)) // bitwise_and ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , BitXor , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitXor , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // bitwise_and expr_ty bitwise_and_var; @@ -6739,14 +6739,14 @@ bitwise_xor_raw(Parser *p) (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and ) { - res = bitwise_and_var; + _res = bitwise_and_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6755,25 +6755,25 @@ static expr_ty bitwise_and_raw(Parser *); static expr_ty bitwise_and_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, bitwise_and_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, 
bitwise_and_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_3 = _PyPegen_update_memo(p, mark, bitwise_and_type, res); + int tmpvar_3 = _PyPegen_update_memo(p, _mark, bitwise_and_type, _res); if (tmpvar_3) { - return res; + return _res; } - p->mark = mark; - void *raw = bitwise_and_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = bitwise_and_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty bitwise_and_raw(Parser *p) @@ -6781,44 +6781,44 @@ bitwise_and_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_and '&' shift_expr + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = bitwise_and_rule(p)) // bitwise_and && - (literal = _PyPegen_expect_token(p, 19)) // token='&' + (_literal = _PyPegen_expect_token(p, 19)) // token='&' && (b = shift_expr_rule(p)) // shift_expr ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - 
UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , BitAnd , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitAnd , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // shift_expr expr_ty shift_expr_var; @@ -6826,14 +6826,14 @@ bitwise_and_raw(Parser *p) (shift_expr_var = shift_expr_rule(p)) // shift_expr ) { - res = shift_expr_var; + _res = shift_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6842,25 +6842,25 @@ static expr_ty shift_expr_raw(Parser *); static expr_ty shift_expr_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, shift_expr_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, shift_expr_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_4 = _PyPegen_update_memo(p, mark, shift_expr_type, res); + int tmpvar_4 = _PyPegen_update_memo(p, _mark, shift_expr_type, _res); if (tmpvar_4) { - return res; + return _res; } - p->mark = mark; - void *raw = shift_expr_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = shift_expr_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty shift_expr_raw(Parser *p) @@ -6868,73 +6868,73 @@ shift_expr_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; 
if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // shift_expr '<<' sum + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 33)) // token='<<' + (_literal = _PyPegen_expect_token(p, 33)) // token='<<' && (b = sum_rule(p)) // sum ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , LShift , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , LShift , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // shift_expr '>>' sum + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 34)) // token='>>' + (_literal = _PyPegen_expect_token(p, 34)) // token='>>' && (b = sum_rule(p)) // sum ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token 
*_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , RShift , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , RShift , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // sum expr_ty sum_var; @@ -6942,14 +6942,14 @@ shift_expr_raw(Parser *p) (sum_var = sum_rule(p)) // sum ) { - res = sum_var; + _res = sum_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6958,25 +6958,25 @@ static expr_ty sum_raw(Parser *); static expr_ty sum_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, sum_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, sum_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_5 = _PyPegen_update_memo(p, mark, sum_type, res); + int tmpvar_5 = _PyPegen_update_memo(p, _mark, sum_type, _res); if (tmpvar_5) { - return res; + return _res; } - p->mark = mark; - void *raw = sum_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = sum_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty sum_raw(Parser *p) @@ -6984,73 +6984,73 @@ sum_raw(Parser *p) if (p->error_indicator) 
{ return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // sum '+' term + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 14)) // token='+' + (_literal = _PyPegen_expect_token(p, 14)) // token='+' && (b = term_rule(p)) // term ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Add , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Add , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // sum '-' term + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 15)) // token='-' + (_literal = _PyPegen_expect_token(p, 15)) // token='-' && (b = term_rule(p)) // term ) { - Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Sub , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Sub , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term expr_ty term_var; @@ -7058,14 +7058,14 @@ sum_raw(Parser *p) (term_var = term_rule(p)) // term ) { - res = term_var; + _res = term_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -7080,25 +7080,25 @@ static expr_ty term_raw(Parser *); static expr_ty term_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, term_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, term_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_6 = _PyPegen_update_memo(p, mark, term_type, res); + int tmpvar_6 = _PyPegen_update_memo(p, _mark, term_type, _res); if (tmpvar_6) { - return res; + return _res; } - p->mark = mark; - void *raw = term_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = term_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty 
term_raw(Parser *p) @@ -7106,160 +7106,160 @@ term_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // term '*' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Mult , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Mult , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '/' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 17)) // token='/' + (_literal = 
_PyPegen_expect_token(p, 17)) // token='/' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Div , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Div , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '//' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 47)) // token='//' + (_literal = _PyPegen_expect_token(p, 47)) // token='//' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , FloorDiv , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , FloorDiv , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + 
p->mark = _mark; } { // term '%' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 24)) // token='%' + (_literal = _PyPegen_expect_token(p, 24)) // token='%' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Mod , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Mod , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '@' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 49)) // token='@' + (_literal = _PyPegen_expect_token(p, 49)) // token='@' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "The '@' operator is" , _Py_BinOp ( a , MatMult , b , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset 
= _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "The '@' operator is" , _Py_BinOp ( a , MatMult , b , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // factor expr_ty factor_var; @@ -7267,14 +7267,14 @@ term_raw(Parser *p) (factor_var = factor_rule(p)) // factor ) { - res = factor_var; + _res = factor_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // factor: '+' factor | '-' factor | '~' factor | power @@ -7284,95 +7284,95 @@ factor_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, factor_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, factor_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '+' factor + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 14)) // token='+' + (_literal = _PyPegen_expect_token(p, 14)) // token='+' && (a = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = 
token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( UAdd , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( UAdd , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '-' factor + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 15)) // token='-' + (_literal = _PyPegen_expect_token(p, 15)) // token='-' && (a = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( USub , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( USub , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '~' factor + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 31)) // token='~' + (_literal = _PyPegen_expect_token(p, 31)) // token='~' && (a = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = 
token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( Invert , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( Invert , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // power expr_ty power_var; @@ -7380,15 +7380,15 @@ factor_rule(Parser *p) (power_var = power_rule(p)) // power ) { - res = power_var; + _res = power_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, factor_type, res); - return res; + _PyPegen_insert_memo(p, _mark, factor_type, _res); + return _res; } // power: await_primary '**' factor | await_primary @@ -7398,44 +7398,44 @@ power_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // await_primary '**' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = await_primary_rule(p)) // await_primary && - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // 
token='**' && (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Pow , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Pow , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // await_primary expr_ty await_primary_var; @@ -7443,14 +7443,14 @@ power_rule(Parser *p) (await_primary_var = await_primary_rule(p)) // await_primary ) { - res = await_primary_var; + _res = await_primary_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // await_primary: AWAIT primary | primary @@ -7460,18 +7460,18 @@ await_primary_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, await_primary_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, await_primary_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int 
_start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // AWAIT primary expr_ty a; Token * await_var; @@ -7481,22 +7481,22 @@ await_primary_rule(Parser *p) (a = primary_rule(p)) // primary ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Await expressions are" , _Py_Await ( a , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Await expressions are" , _Py_Await ( a , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary expr_ty primary_var; @@ -7504,15 +7504,15 @@ await_primary_rule(Parser *p) (primary_var = primary_rule(p)) // primary ) { - res = primary_var; + _res = primary_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, await_primary_type, res); - return res; + _PyPegen_insert_memo(p, _mark, await_primary_type, _res); + return _res; } // Left-recursive @@ -7526,25 +7526,25 @@ static expr_ty primary_raw(Parser *); static expr_ty primary_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, primary_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, primary_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_7 = 
_PyPegen_update_memo(p, mark, primary_type, res); + int tmpvar_7 = _PyPegen_update_memo(p, _mark, primary_type, _res); if (tmpvar_7) { - return res; + return _res; } - p->mark = mark; - void *raw = primary_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = primary_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty primary_raw(Parser *p) @@ -7552,44 +7552,44 @@ primary_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // primary '.' NAME + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . 
id , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary genexp expr_ty a; @@ -7600,86 +7600,86 @@ primary_raw(Parser *p) (b = genexp_rule(p)) // genexp ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary '(' arguments? ')' + Token * _literal; + Token * _literal_1; expr_ty a; void *b; - Token * literal; - Token * literal_1; if ( (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (b = arguments_rule(p), 1) // arguments? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary '[' slices ']' + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Load 
, EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // atom expr_ty atom_var; @@ -7687,14 +7687,14 @@ primary_raw(Parser *p) (atom_var = atom_rule(p)) // atom ) { - res = atom_var; + _res = atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slices: slice !',' | ','.slice+ ','? @@ -7704,16 +7704,16 @@ slices_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // slice !',' expr_ty a; if ( @@ -7722,45 +7722,45 @@ slices_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.slice+ ','? 
+ void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_93_rule(p)) // ','.slice+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slice: expression? ':' expression? [':' expression?] | expression @@ -7770,47 +7770,47 @@ slice_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression? ':' expression? 
[':' expression?] + Token * _literal; void *a; void *b; void *c; - Token * literal; if ( (a = expression_rule(p), 1) // expression? && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p), 1) // expression? && (c = _tmp_95_rule(p), 1) // [':' expression?] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Slice ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Slice ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression expr_ty a; @@ -7818,18 +7818,18 @@ slice_rule(Parser *p) (a = expression_rule(p)) // expression ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // atom: @@ -7850,110 +7850,110 @@ atom_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - 
UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty name_var; if ( (name_var = _PyPegen_name_token(p)) // NAME ) { - res = name_var; + _res = name_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'True' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 527)) // token='True' + (_keyword = _PyPegen_expect_token(p, 527)) // token='True' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_True , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_True , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'False' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 528)) // token='False' + (_keyword = _PyPegen_expect_token(p, 528)) // token='False' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by 
EXTRA macro - res = _Py_Constant ( Py_False , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_False , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'None' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 529)) // token='None' + (_keyword = _PyPegen_expect_token(p, 529)) // token='None' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_None , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_None , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '__new_parser__' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' + (_keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' ) { - res = RAISE_SYNTAX_ERROR ( "You found it!" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "You found it!" 
); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &STRING strings expr_ty strings_var; @@ -7963,10 +7963,10 @@ atom_rule(Parser *p) (strings_var = strings_rule(p)) // strings ) { - res = strings_var; + _res = strings_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // NUMBER expr_ty number_var; @@ -7974,10 +7974,10 @@ atom_rule(Parser *p) (number_var = _PyPegen_number_token(p)) // NUMBER ) { - res = number_var; + _res = number_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'(' (tuple | group | genexp) void *_tmp_96_var; @@ -7987,10 +7987,10 @@ atom_rule(Parser *p) (_tmp_96_var = _tmp_96_rule(p)) // tuple | group | genexp ) { - res = _tmp_96_var; + _res = _tmp_96_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'[' (list | listcomp) void *_tmp_97_var; @@ -8000,10 +8000,10 @@ atom_rule(Parser *p) (_tmp_97_var = _tmp_97_rule(p)) // list | listcomp ) { - res = _tmp_97_var; + _res = _tmp_97_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'{' (dict | set | dictcomp | setcomp) void *_tmp_98_var; @@ -8013,37 +8013,37 @@ atom_rule(Parser *p) (_tmp_98_var = _tmp_98_rule(p)) // dict | set | dictcomp | setcomp ) { - res = _tmp_98_var; + _res = _tmp_98_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // '...' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 52)) // token='...' + (_literal = _PyPegen_expect_token(p, 52)) // token='...' 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_Ellipsis , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_Ellipsis , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // strings: STRING+ @@ -8053,29 +8053,29 @@ strings_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, strings_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, strings_type, &_res)) + return _res; + int _mark = p->mark; { // STRING+ asdl_seq * a; if ( (a = _loop1_99_rule(p)) // STRING+ ) { - res = _PyPegen_concatenate_strings ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_concatenate_strings ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, strings_type, res); - return res; + _PyPegen_insert_memo(p, _mark, strings_type, _res); + return _res; } // list: '[' star_named_expressions? 
']' @@ -8085,48 +8085,48 @@ list_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' star_named_expressions? ']' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (a = star_named_expressions_rule(p), 1) // star_named_expressions? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // listcomp: '[' named_expression for_if_clauses ']' | invalid_comprehension @@ -8136,47 +8136,47 @@ listcomp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' named_expression for_if_clauses ']' + Token * _literal; + Token * _literal_1; expr_ty a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + 
(_literal = _PyPegen_expect_token(p, 9)) // token='[' && (a = named_expression_rule(p)) // named_expression && (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ListComp ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ListComp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_comprehension void *invalid_comprehension_var; @@ -8184,14 +8184,14 @@ listcomp_rule(Parser *p) (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { - res = invalid_comprehension_var; + _res = invalid_comprehension_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // tuple: '(' [star_named_expression ',' star_named_expressions?] 
')' @@ -8201,48 +8201,48 @@ tuple_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' [star_named_expression ',' star_named_expressions?] ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = _tmp_100_rule(p), 1) // [star_named_expression ',' star_named_expressions?] 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // group: '(' (yield_expr | named_expression) ')' @@ -8252,32 +8252,32 @@ group_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // '(' (yield_expr | named_expression) ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = _tmp_101_rule(p)) // yield_expr | named_expression && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // genexp: '(' expression for_if_clauses ')' | invalid_comprehension 
@@ -8287,47 +8287,47 @@ genexp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' expression for_if_clauses ')' + Token * _literal; + Token * _literal_1; expr_ty a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = expression_rule(p)) // expression && (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_GeneratorExp ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_GeneratorExp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { 
// invalid_comprehension void *invalid_comprehension_var; @@ -8335,14 +8335,14 @@ genexp_rule(Parser *p) (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { - res = invalid_comprehension_var; + _res = invalid_comprehension_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // set: '{' expressions_list '}' @@ -8352,48 +8352,48 @@ set_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expressions_list '}' + Token * _literal; + Token * _literal_1; asdl_seq* a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && (a = expressions_list_rule(p)) // expressions_list && - (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Set ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = 
_token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Set ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // setcomp: '{' expression for_if_clauses '}' | invalid_comprehension @@ -8403,47 +8403,47 @@ setcomp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expression for_if_clauses '}' + Token * _literal; + Token * _literal_1; expr_ty a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && (a = expression_rule(p)) // expression && (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // 
Only used by EXTRA macro - res = _Py_SetComp ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_SetComp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_comprehension void *invalid_comprehension_var; @@ -8451,14 +8451,14 @@ setcomp_rule(Parser *p) (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { - res = invalid_comprehension_var; + _res = invalid_comprehension_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dict: '{' kvpairs? '}' @@ -8468,48 +8468,48 @@ dict_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' kvpairs? '}' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && (a = kvpairs_rule(p), 1) // kvpairs? 
&& - (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Dict ( CHECK ( _PyPegen_get_keys ( p , a ) ) , CHECK ( _PyPegen_get_values ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Dict ( CHECK ( _PyPegen_get_keys ( p , a ) ) , CHECK ( _PyPegen_get_values ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dictcomp: '{' kvpair for_if_clauses '}' @@ -8519,51 +8519,51 @@ dictcomp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' kvpair for_if_clauses '}' + Token * _literal; + Token * _literal_1; KeyValuePair* a; asdl_seq* b; - 
Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && (a = kvpair_rule(p)) // kvpair && (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_DictComp ( a -> key , a -> value , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_DictComp ( a -> key , a -> value , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kvpairs: ','.kvpair+ ','? @@ -8573,30 +8573,30 @@ kvpairs_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.kvpair+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_102_rule(p)) // ','.kvpair+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kvpair: '**' bitwise_or | expression ':' expression @@ -8606,50 +8606,50 @@ kvpair_rule(Parser *p) if (p->error_indicator) { return NULL; } - KeyValuePair* res = NULL; - int mark = p->mark; + KeyValuePair* _res = NULL; + int _mark = p->mark; { // '**' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_key_value_pair ( p , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_key_value_pair ( p , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ':' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression ) { - res = _PyPegen_key_value_pair ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_key_value_pair ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // for_if_clauses: for_if_clause+ @@ -8659,22 +8659,22 @@ for_if_clauses_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // for_if_clause+ asdl_seq * _loop1_104_var; if ( (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ ) { - res 
= _loop1_104_var; + _res = _loop1_104_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // for_if_clause: @@ -8686,68 +8686,68 @@ for_if_clause_rule(Parser *p) if (p->error_indicator) { return NULL; } - comprehension_ty res = NULL; - int mark = p->mark; + comprehension_ty _res = NULL; + int _mark = p->mark; { // ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* + Token * _keyword; + Token * _keyword_1; expr_ty a; Token * async_var; expr_ty b; asdl_seq * c; - Token * keyword; - Token * keyword_1; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (b = disjunction_rule(p)) // disjunction && (c = _loop0_105_rule(p)) // (('if' disjunction))* ) { - res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); - if (res == NULL && PyErr_Occurred()) { + _res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'for' star_targets 'in' disjunction (('if' disjunction))* + Token * _keyword; + Token * _keyword_1; expr_ty a; expr_ty b; asdl_seq * c; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && (b = disjunction_rule(p)) // disjunction && (c = _loop0_106_rule(p)) // (('if' 
disjunction))* ) { - res = _Py_comprehension ( a , b , c , 0 , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_comprehension ( a , b , c , 0 , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // yield_expr: 'yield' 'from' expression | 'yield' star_expressions? @@ -8757,74 +8757,74 @@ yield_expr_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'yield' 'from' expression + Token * _keyword; + Token * _keyword_1; expr_ty a; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 504)) // token='yield' + (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' && - (keyword_1 = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 514)) // token='from' && (a = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_YieldFrom ( a , EXTRA ); - if (res == NULL && 
PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_YieldFrom ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'yield' star_expressions? + Token * _keyword; void *a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 504)) // token='yield' + (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' && (a = star_expressions_rule(p), 1) // star_expressions? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Yield ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Yield ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // arguments: args ','? &')' | incorrect_arguments @@ -8834,30 +8834,30 @@ arguments_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, arguments_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, arguments_type, &_res)) + return _res; + int _mark = p->mark; { // args ','? 
&')' + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = args_rule(p)) // args && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? && _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // incorrect_arguments void *incorrect_arguments_var; @@ -8865,15 +8865,15 @@ arguments_rule(Parser *p) (incorrect_arguments_var = incorrect_arguments_rule(p)) // incorrect_arguments ) { - res = incorrect_arguments_var; + _res = incorrect_arguments_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, arguments_type, res); - return res; + _PyPegen_insert_memo(p, _mark, arguments_type, _res); + return _res; } // args: starred_expression [',' args] | kwargs | named_expression [',' args] @@ -8883,16 +8883,16 @@ args_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // starred_expression [',' args] expr_ty a; void *b; @@ -8902,22 +8902,22 @@ args_rule(Parser *p) (b = _tmp_107_rule(p), 1) // [',' args] ) { - Token *token = 
_PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // kwargs asdl_seq* a; @@ -8925,22 +8925,22 @@ args_rule(Parser *p) (a = kwargs_rule(p)) // kwargs ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( _PyPegen_dummy_name ( p ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_extract_starred_exprs ( p , a ) ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_delete_starred_exprs ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_extract_starred_exprs ( p , a ) ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_delete_starred_exprs ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // named_expression [',' args] expr_ty a; @@ -8951,26 +8951,26 @@ args_rule(Parser *p) (b = _tmp_108_rule(p), 1) // [',' args] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? 
CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwargs: @@ -8983,28 +8983,28 @@ kwargs_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ + Token * _literal; asdl_seq * a; asdl_seq * b; - Token * literal; if ( (a = _gather_109_rule(p)) // ','.kwarg_or_starred+ && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (b = _gather_111_rule(p)) // ','.kwarg_or_double_starred+ ) { - res = _PyPegen_join_sequences ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_join_sequences ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.kwarg_or_starred+ asdl_seq * _gather_113_var; @@ -9012,10 +9012,10 @@ kwargs_rule(Parser *p) (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ ) { - res = _gather_113_var; + _res = _gather_113_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 
','.kwarg_or_double_starred+ asdl_seq * _gather_115_var; @@ -9023,14 +9023,14 @@ kwargs_rule(Parser *p) (_gather_115_var = _gather_115_rule(p)) // ','.kwarg_or_double_starred+ ) { - res = _gather_115_var; + _res = _gather_115_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // starred_expression: '*' expression @@ -9040,45 +9040,45 @@ starred_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( 
a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwarg_or_starred: NAME '=' expression | starred_expression | invalid_kwarg @@ -9088,44 +9088,44 @@ kwarg_or_starred_rule(Parser *p) if (p->error_indicator) { return NULL; } - KeywordOrStarred* res = NULL; - int mark = p->mark; + KeywordOrStarred* _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . id , b , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // starred_expression expr_ty a; @@ -9133,14 +9133,14 @@ kwarg_or_starred_rule(Parser *p) (a = starred_expression_rule(p)) // starred_expression ) { - res = _PyPegen_keyword_or_starred ( p , a , 0 ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_keyword_or_starred ( p , a , 0 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_kwarg void *invalid_kwarg_var; @@ -9148,14 +9148,14 @@ kwarg_or_starred_rule(Parser *p) (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg ) { - res = invalid_kwarg_var; + _res = invalid_kwarg_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwarg_or_double_starred: NAME '=' expression | '**' expression | invalid_kwarg @@ -9165,70 +9165,70 @@ kwarg_or_double_starred_rule(Parser *p) if (p->error_indicator) { return NULL; } - KeywordOrStarred* res = NULL; - int mark = p->mark; + KeywordOrStarred* _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = 
p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . id , b , EXTRA ) ) , 1 ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && (a = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( NULL , a , EXTRA ) ) , 1 ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( NULL , a , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_kwarg void *invalid_kwarg_var; @@ -9236,14 +9236,14 @@ kwarg_or_double_starred_rule(Parser *p) (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg ) { - res = invalid_kwarg_var; + _res = invalid_kwarg_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_targets: star_target !',' | star_target ((',' star_target))* ','? 
@@ -9253,16 +9253,16 @@ star_targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_target !',' expr_ty a; if ( @@ -9271,48 +9271,48 @@ star_targets_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_target ((',' star_target))* ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; asdl_seq * b; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = star_target_rule(p)) // star_target && (b = _loop0_117_rule(p)) // ((',' star_target))* && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_targets_seq: ','.star_target+ ','? @@ -9322,30 +9322,30 @@ star_targets_seq_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.star_target+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_118_rule(p)) // ','.star_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_target: @@ -9359,108 +9359,108 @@ star_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, star_target_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, star_target_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' (!'*' star_target) + Token * _literal; void *a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (a = _tmp_120_rule(p)) // !'*' star_target ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = 
_token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . 
id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_atom expr_ty star_atom_var; @@ -9468,15 +9468,15 @@ star_target_rule(Parser *p) (star_atom_var = star_atom_rule(p)) // star_atom ) { - res = star_atom_var; + _res = star_atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, star_target_type, res); - return res; + _PyPegen_insert_memo(p, _mark, star_target_type, _res); + return _res; } // star_atom: @@ -9490,113 +9490,113 @@ star_atom_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' star_target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = star_target_rule(p)) // star_target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' star_targets_seq? ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = star_targets_seq_rule(p), 1) // star_targets_seq? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' star_targets_seq? ']' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (a = star_targets_seq_rule(p), 1) // star_targets_seq? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( a , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // inside_paren_ann_assign_target: @@ -9609,18 +9609,18 @@ inside_paren_ann_assign_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // ann_assign_subscript_attribute_target expr_ty ann_assign_subscript_attribute_target_var; if ( (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target ) { - res = ann_assign_subscript_attribute_target_var; + _res = ann_assign_subscript_attribute_target_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // NAME expr_ty a; @@ -9628,39 +9628,39 @@ inside_paren_ann_assign_target_rule(Parser *p) (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } 
goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' inside_paren_ann_assign_target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // ann_assign_subscript_attribute_target: @@ -9672,84 +9672,84 @@ ann_assign_subscript_attribute_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // 
Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // del_targets: ','.del_target+ ','? @@ -9759,30 +9759,30 @@ del_targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.del_target+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_121_rule(p)) // ','.del_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // del_target: @@ -9795,82 +9795,82 @@ del_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, del_target_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, del_target_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + 
UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . 
id , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // del_t_atom expr_ty del_t_atom_var; @@ -9878,15 +9878,15 @@ del_target_rule(Parser *p) (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom ) { - res = del_t_atom_var; + _res = del_t_atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, del_target_type, res); - return res; + _PyPegen_insert_memo(p, _mark, del_target_type, _res); + return _res; } // del_t_atom: NAME | '(' del_target ')' | '(' del_targets? 
')' | '[' del_targets? ']' @@ -9896,113 +9896,113 @@ del_t_atom_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Del ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Del ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' del_target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = del_target_rule(p)) // del_target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = _PyPegen_set_expr_context ( p , a , Del ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Del ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' del_targets? 
')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = del_targets_rule(p), 1) // del_targets? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' del_targets? ']' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (a = del_targets_rule(p), 1) // del_targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( a , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // targets: ','.target+ ','? @@ -10012,30 +10012,30 @@ targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.target+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (a = _gather_123_rule(p)) // ','.target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // target: @@ -10048,82 +10048,82 @@ target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, target_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, target_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . 
id , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_atom expr_ty t_atom_var; @@ -10131,15 +10131,15 @@ target_rule(Parser *p) (t_atom_var = t_atom_rule(p)) // t_atom ) { - res = t_atom_var; + _res = t_atom_var; goto done; } - p->mark = mark; 
+ p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, target_type, res); - return res; + _PyPegen_insert_memo(p, _mark, target_type, _res); + return _res; } // Left-recursive @@ -10153,25 +10153,25 @@ static expr_ty t_primary_raw(Parser *); static expr_ty t_primary_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, t_primary_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, t_primary_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_8 = _PyPegen_update_memo(p, mark, t_primary_type, res); + int tmpvar_8 = _PyPegen_update_memo(p, _mark, t_primary_type, _res); if (tmpvar_8) { - return res; + return _res; } - p->mark = mark; - void *raw = t_primary_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = t_primary_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty t_primary_raw(Parser *p) @@ -10179,80 +10179,80 @@ t_primary_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' 
NAME &t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . 
id , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' &t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary genexp &t_lookahead expr_ty a; @@ -10265,56 +10265,56 @@ t_primary_raw(Parser *p) _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by 
EXTRA macro - res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '(' arguments? ')' &t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; void *b; - Token * literal; - Token * literal_1; if ( (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (b = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // atom &t_lookahead expr_ty a; @@ -10324,18 +10324,18 @@ t_primary_raw(Parser *p) _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // t_lookahead: '(' | '[' | '.' @@ -10345,44 +10345,44 @@ t_lookahead_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '(' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '.' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // t_atom: NAME | '(' target ')' | '(' targets? ')' | '[' targets? 
']' @@ -10392,113 +10392,113 @@ t_atom_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (a = target_rule(p)) // target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' targets? ')' + Token * _literal; + Token * _literal_1; void *b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (b = targets_rule(p), 1) // targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' targets? ']' + Token * _literal; + Token * _literal_1; void *b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && (b = targets_rule(p), 1) // targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // incorrect_arguments: @@ -10511,78 +10511,78 @@ incorrect_arguments_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // args ',' '*' + Token * _literal; + Token * _literal_1; expr_ty args_var; - Token * literal; - Token * literal_1; if ( (args_var = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) // token='*' + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' ) { - res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 
expression for_if_clauses ',' [args | expression for_if_clauses] + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; asdl_seq* for_if_clauses_var; - Token * literal; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (expression_var = expression_rule(p)) // expression && (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] + (_opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] ) { - res = RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // args ',' args + Token * _literal; expr_ty a; expr_ty args_var; - Token * literal; if ( (a = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (args_var = args_rule(p)) // args ) { - res = _PyPegen_arguments_parsing_error ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_arguments_parsing_error ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_kwarg: expression '=' @@ -10592,29 +10592,29 @@ invalid_kwarg_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // expression '=' + Token * _literal; expr_ty expression_var; - Token * literal; if ( (expression_var = expression_rule(p)) // expression && - 
(literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - res = RAISE_SYNTAX_ERROR ( "expression cannot contain assignment, perhaps you meant \"==\"?" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "expression cannot contain assignment, perhaps you meant \"==\"?" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_named_expression: expression ':=' expression @@ -10624,32 +10624,32 @@ invalid_named_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // expression ':=' expression + Token * _literal; expr_ty a; expr_ty expression_var; - Token * literal; if ( (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 53)) // token=':=' + (_literal = _PyPegen_expect_token(p, 53)) // token=':=' && (expression_var = expression_rule(p)) // expression ) { - res = RAISE_SYNTAX_ERROR ( "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_assignment: @@ -10663,68 +10663,68 @@ invalid_assignment_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // list ':' + Token * _literal; expr_ty list_var; - Token * literal; if ( (list_var = list_rule(p)) // list && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = 
_PyPegen_expect_token(p, 11)) // token=':' ) { - res = RAISE_SYNTAX_ERROR ( "only single target (not list) can be annotated" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "only single target (not list) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // tuple ':' - Token * literal; + Token * _literal; expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = RAISE_SYNTAX_ERROR ( "only single target (not tuple) can be annotated" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "only single target (not tuple) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ':' expression ['=' annotated_rhs] + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; expr_ty expression_var_1; - Token * literal; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( (expression_var = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (expression_var_1 = expression_rule(p)) // expression && - (opt_var = _tmp_126_rule(p), 1) // ['=' annotated_rhs] + (_opt_var = _tmp_126_rule(p), 1) // ['=' annotated_rhs] ) { - res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ('=' | augassign) (yield_expr | star_expressions) void *_tmp_127_var; @@ -10738,18 +10738,18 @@ 
invalid_assignment_rule(Parser *p) (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions ) { - res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_block: NEWLINE !INDENT @@ -10759,8 +10759,8 @@ invalid_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // NEWLINE !INDENT Token * newline_var; if ( @@ -10769,18 +10769,18 @@ invalid_block_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { - res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_comprehension: ('[' | '(' | '{') '*' expression for_if_clauses @@ -10790,35 +10790,35 @@ invalid_comprehension_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ('[' | '(' | '{') '*' expression for_if_clauses + Token * _literal; void *_tmp_129_var; expr_ty expression_var; asdl_seq* for_if_clauses_var; - Token * literal; if ( (_tmp_129_var = _tmp_129_rule(p)) // '[' | '(' | '{' && - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && 
(for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - res = RAISE_SYNTAX_ERROR ( "iterable unpacking cannot be used in comprehension" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "iterable unpacking cannot be used in comprehension" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_parameters: @@ -10829,8 +10829,8 @@ invalid_parameters_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // param_no_default* (slash_with_default | param_with_default+) param_no_default asdl_seq * _loop0_130_var; void *_tmp_131_var; @@ -10843,18 +10843,18 @@ invalid_parameters_rule(Parser *p) (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_star_etc: '*' (')' | ',' (')' | '**')) @@ -10864,29 +10864,29 @@ invalid_star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '*' (')' | ',' (')' | '**')) + Token * _literal; void *_tmp_132_var; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (_tmp_132_var = _tmp_132_rule(p)) // ')' | ',' (')' | '**') ) { - res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); - if (res == NULL && PyErr_Occurred()) { + _res = 
RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_lambda_star_etc: '*' (':' | ',' (':' | '**')) @@ -10896,29 +10896,29 @@ invalid_lambda_star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '*' (':' | ',' (':' | '**')) + Token * _literal; void *_tmp_133_var; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) // token='*' + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (_tmp_133_var = _tmp_133_rule(p)) // ':' | ',' (':' | '**') ) { - res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_double_type_comments: TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT @@ -10928,8 +10928,8 @@ invalid_double_type_comments_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT Token * indent_var; Token * newline_var; @@ -10948,18 +10948,18 @@ invalid_double_type_comments_rule(Parser *p) (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { - res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; 
} - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_1: NEWLINE @@ -10969,46 +10969,46 @@ _loop0_1_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // NEWLINE Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = newline_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = newline_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_1"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_1_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_1_type, _seq); + return _seq; } // _loop0_2: NEWLINE @@ -11018,46 +11018,46 @@ _loop0_2_rule(Parser *p) if (p->error_indicator) { return NULL; } - void 
*res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // NEWLINE Token * newline_var; while ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = newline_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = newline_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_2"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_2_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_2_type, _seq); + return _seq; } // _loop0_4: ',' expression @@ -11067,54 +11067,54 @@ _loop0_4_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int 
_start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_4"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_4_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_4_type, _seq); + return _seq; } // _gather_3: expression _loop0_4 @@ -11124,8 +11124,8 @@ _gather_3_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int 
_mark = p->mark; { // expression _loop0_4 expr_ty elem; asdl_seq * seq; @@ -11135,14 +11135,14 @@ _gather_3_rule(Parser *p) (seq = _loop0_4_rule(p)) // _loop0_4 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_6: ',' expression @@ -11152,54 +11152,54 @@ _loop0_6_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = 
_Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_6"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_6_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_6_type, _seq); + return _seq; } // _gather_5: expression _loop0_6 @@ -11209,8 +11209,8 @@ _gather_5_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_6 expr_ty elem; asdl_seq * seq; @@ -11220,14 +11220,14 @@ _gather_5_rule(Parser *p) (seq = _loop0_6_rule(p)) // _loop0_6 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_8: ',' expression @@ -11237,54 +11237,54 @@ _loop0_8_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == 
NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_8"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_8_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_8_type, _seq); + return _seq; } // _gather_7: expression _loop0_8 @@ -11294,8 +11294,8 @@ _gather_7_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_8 expr_ty elem; asdl_seq * seq; @@ -11305,14 +11305,14 @@ _gather_7_rule(Parser *p) (seq = _loop0_8_rule(p)) // _loop0_8 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_10: ',' expression @@ -11322,54 +11322,54 @@ _loop0_10_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children 
= PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_10"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_10_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_10_type, _seq); + return _seq; } // _gather_9: expression _loop0_10 @@ -11379,8 +11379,8 @@ _gather_9_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_10 expr_ty elem; asdl_seq * seq; @@ -11390,14 +11390,14 @@ _gather_9_rule(Parser *p) (seq = _loop0_10_rule(p)) // _loop0_10 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_11: statement @@ -11407,50 +11407,50 @@ _loop1_11_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // statement asdl_seq* statement_var; while ( (statement_var = statement_rule(p)) // statement ) { - res = statement_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = statement_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { 
PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_11"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_11_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_11_type, _seq); + return _seq; } // _loop0_13: ';' small_stmt @@ -11460,54 +11460,54 @@ _loop0_13_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ';' small_stmt + Token * _literal; stmt_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 13)) // token=';' + (_literal = _PyPegen_expect_token(p, 13)) // token=';' && (elem = small_stmt_rule(p)) // small_stmt ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = 
_Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_13"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_13_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_13_type, _seq); + return _seq; } // _gather_12: small_stmt _loop0_13 @@ -11517,8 +11517,8 @@ _gather_12_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // small_stmt _loop0_13 stmt_ty elem; asdl_seq * seq; @@ -11528,14 +11528,14 @@ _gather_12_rule(Parser *p) (seq = _loop0_13_rule(p)) // _loop0_13 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_14: 'import' | 'from' @@ -11545,33 +11545,33 @@ _tmp_14_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'import' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) // token='import' + (_keyword = _PyPegen_expect_token(p, 513)) // token='import' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'from' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_15: 
'def' | '@' | ASYNC @@ -11581,29 +11581,29 @@ _tmp_15_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'def' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 522)) // token='def' + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // '@' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 49)) // token='@' + (_literal = _PyPegen_expect_token(p, 49)) // token='@' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC Token * async_var; @@ -11611,14 +11611,14 @@ _tmp_15_rule(Parser *p) (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - res = async_var; + _res = async_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_16: 'class' | '@' @@ -11628,33 +11628,33 @@ _tmp_16_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'class' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 523)) // token='class' + (_keyword = _PyPegen_expect_token(p, 523)) // token='class' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // '@' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 49)) // token='@' + (_literal = _PyPegen_expect_token(p, 49)) // token='@' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_17: 'with' | ASYNC @@ -11664,18 +11664,18 @@ _tmp_17_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * 
_res = NULL; + int _mark = p->mark; { // 'with' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 519)) // token='with' + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC Token * async_var; @@ -11683,14 +11683,14 @@ _tmp_17_rule(Parser *p) (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - res = async_var; + _res = async_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_18: 'for' | ASYNC @@ -11700,18 +11700,18 @@ _tmp_18_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'for' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 517)) // token='for' + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC Token * async_var; @@ -11719,14 +11719,14 @@ _tmp_18_rule(Parser *p) (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - res = async_var; + _res = async_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_19: '=' annotated_rhs @@ -11736,29 +11736,29 @@ _tmp_19_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' annotated_rhs + Token * _literal; expr_ty d; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (d = annotated_rhs_rule(p)) // annotated_rhs ) { - res = d; - if (res == NULL && PyErr_Occurred()) { + _res = d; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - 
p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_20: '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target @@ -11768,28 +11768,28 @@ _tmp_20_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '(' inside_paren_ann_assign_target ')' + Token * _literal; + Token * _literal_1; expr_ty b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (b = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target && - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = b; - if (res == NULL && PyErr_Occurred()) { + _res = b; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ann_assign_subscript_attribute_target expr_ty ann_assign_subscript_attribute_target_var; @@ -11797,14 +11797,14 @@ _tmp_20_rule(Parser *p) (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target ) { - res = ann_assign_subscript_attribute_target_var; + _res = ann_assign_subscript_attribute_target_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_21: '=' annotated_rhs @@ -11814,29 +11814,29 @@ _tmp_21_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' annotated_rhs + Token * _literal; expr_ty d; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (d = annotated_rhs_rule(p)) // annotated_rhs ) { - res = d; - if 
(res == NULL && PyErr_Occurred()) { + _res = d; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_22: (star_targets '=') @@ -11846,50 +11846,50 @@ _loop1_22_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (star_targets '=') void *_tmp_134_var; while ( (_tmp_134_var = _tmp_134_rule(p)) // star_targets '=' ) { - res = _tmp_134_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_134_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_22"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, 
start_mark, _loop1_22_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_22_type, _seq); + return _seq; } // _tmp_23: yield_expr | star_expressions @@ -11899,18 +11899,18 @@ _tmp_23_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; @@ -11918,14 +11918,14 @@ _tmp_23_rule(Parser *p) (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_24: yield_expr | star_expressions @@ -11935,18 +11935,18 @@ _tmp_24_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; @@ -11954,14 +11954,14 @@ _tmp_24_rule(Parser *p) (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_26: ',' NAME @@ -11971,54 +11971,54 @@ _loop0_26_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = 
PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' NAME + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = _PyPegen_name_token(p)) // NAME ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_26"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_26_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_26_type, _seq); + return _seq; } // _gather_25: NAME _loop0_26 @@ -12028,8 +12028,8 @@ _gather_25_rule(Parser *p) if (p->error_indicator) { 
return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // NAME _loop0_26 expr_ty elem; asdl_seq * seq; @@ -12039,14 +12039,14 @@ _gather_25_rule(Parser *p) (seq = _loop0_26_rule(p)) // _loop0_26 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_28: ',' NAME @@ -12056,54 +12056,54 @@ _loop0_28_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' NAME + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = _PyPegen_name_token(p)) // NAME ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq 
*seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_28"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_28_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_28_type, _seq); + return _seq; } // _gather_27: NAME _loop0_28 @@ -12113,8 +12113,8 @@ _gather_27_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // NAME _loop0_28 expr_ty elem; asdl_seq * seq; @@ -12124,14 +12124,14 @@ _gather_27_rule(Parser *p) (seq = _loop0_28_rule(p)) // _loop0_28 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_29: ',' expression @@ -12141,29 +12141,29 @@ _tmp_29_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' expression - Token * literal; + Token * _literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_30: ('.' 
| '...') @@ -12173,46 +12173,46 @@ _loop0_30_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('.' | '...') void *_tmp_135_var; while ( (_tmp_135_var = _tmp_135_rule(p)) // '.' | '...' ) { - res = _tmp_135_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_135_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_30"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_30_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_30_type, _seq); + return _seq; } // _loop1_31: ('.' 
| '...') @@ -12222,50 +12222,50 @@ _loop1_31_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('.' | '...') void *_tmp_136_var; while ( (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' ) { - res = _tmp_136_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_136_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_31"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_31_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_31_type, _seq); + return _seq; } // _loop0_33: ',' import_from_as_name @@ -12275,54 +12275,54 @@ 
_loop0_33_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' import_from_as_name + Token * _literal; alias_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = import_from_as_name_rule(p)) // import_from_as_name ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_33"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_33_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + 
PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_33_type, _seq); + return _seq; } // _gather_32: import_from_as_name _loop0_33 @@ -12332,8 +12332,8 @@ _gather_32_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // import_from_as_name _loop0_33 alias_ty elem; asdl_seq * seq; @@ -12343,14 +12343,14 @@ _gather_32_rule(Parser *p) (seq = _loop0_33_rule(p)) // _loop0_33 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_34: 'as' NAME @@ -12360,29 +12360,29 @@ _tmp_34_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' NAME - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_36: ',' dotted_as_name @@ -12392,54 +12392,54 @@ _loop0_36_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { 
// ',' dotted_as_name + Token * _literal; alias_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = dotted_as_name_rule(p)) // dotted_as_name ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_36"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_36_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_36_type, _seq); + return _seq; } // _gather_35: dotted_as_name _loop0_36 @@ -12449,8 +12449,8 @@ _gather_35_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // dotted_as_name _loop0_36 alias_ty elem; asdl_seq * seq; @@ -12460,14 +12460,14 @@ _gather_35_rule(Parser *p) (seq = _loop0_36_rule(p)) // _loop0_36 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = 
_PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_37: 'as' NAME @@ -12477,29 +12477,29 @@ _tmp_37_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' NAME - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_39: ',' with_item @@ -12509,54 +12509,54 @@ _loop0_39_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, 
children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_39"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_39_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_39_type, _seq); + return _seq; } // _gather_38: with_item _loop0_39 @@ -12566,8 +12566,8 @@ _gather_38_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_39 withitem_ty elem; asdl_seq * seq; @@ -12577,14 +12577,14 @@ _gather_38_rule(Parser *p) (seq = _loop0_39_rule(p)) // _loop0_39 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_41: ',' with_item @@ -12594,54 +12594,54 @@ _loop0_41_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { 
PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_41"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_41_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_41_type, _seq); + return _seq; } // _gather_40: with_item _loop0_41 @@ -12651,8 +12651,8 @@ _gather_40_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_41 withitem_ty elem; asdl_seq * seq; @@ -12662,14 
+12662,14 @@ _gather_40_rule(Parser *p) (seq = _loop0_41_rule(p)) // _loop0_41 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_43: ',' with_item @@ -12679,54 +12679,54 @@ _loop0_43_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, 
"asdl_seq_new _loop0_43"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_43_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_43_type, _seq); + return _seq; } // _gather_42: with_item _loop0_43 @@ -12736,8 +12736,8 @@ _gather_42_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_43 withitem_ty elem; asdl_seq * seq; @@ -12747,14 +12747,14 @@ _gather_42_rule(Parser *p) (seq = _loop0_43_rule(p)) // _loop0_43 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_45: ',' with_item @@ -12764,54 +12764,54 @@ _loop0_45_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - 
PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_45"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_45_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_45_type, _seq); + return _seq; } // _gather_44: with_item _loop0_45 @@ -12821,8 +12821,8 @@ _gather_44_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_45 withitem_ty elem; asdl_seq * seq; @@ -12832,14 +12832,14 @@ _gather_44_rule(Parser *p) (seq = _loop0_45_rule(p)) // _loop0_45 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_46: 'as' target @@ -12849,29 +12849,29 @@ _tmp_46_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' target - Token * keyword; + Token * 
_keyword; expr_ty t; if ( - (keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && (t = target_rule(p)) // target ) { - res = t; - if (res == NULL && PyErr_Occurred()) { + _res = t; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_47: except_block @@ -12881,50 +12881,50 @@ _loop1_47_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // except_block excepthandler_ty except_block_var; while ( (except_block_var = except_block_rule(p)) // except_block ) { - res = except_block_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = except_block_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, 
"asdl_seq_new _loop1_47"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_47_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_47_type, _seq); + return _seq; } // _tmp_48: 'as' target @@ -12934,29 +12934,29 @@ _tmp_48_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' target - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && (z = target_rule(p)) // target ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_49: 'from' expression @@ -12966,29 +12966,29 @@ _tmp_49_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'from' expression - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 514)) // token='from' + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_50: '->' expression @@ -12998,29 +12998,29 @@ _tmp_50_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + 
void * _res = NULL; + int _mark = p->mark; { // '->' expression - Token * literal; + Token * _literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) // token='->' + (_literal = _PyPegen_expect_token(p, 51)) // token='->' && (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_51: '->' expression @@ -13030,29 +13030,29 @@ _tmp_51_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '->' expression - Token * literal; + Token * _literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) // token='->' + (_literal = _PyPegen_expect_token(p, 51)) // token='->' && (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_52: NEWLINE INDENT @@ -13062,8 +13062,8 @@ _tmp_52_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // NEWLINE INDENT Token * indent_var; Token * newline_var; @@ -13073,14 +13073,14 @@ _tmp_52_rule(Parser *p) (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { - res = _PyPegen_dummy_name(p, newline_var, indent_var); + _res = _PyPegen_dummy_name(p, newline_var, indent_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_53: param_no_default @@ -13090,46 +13090,46 @@ _loop0_53_rule(Parser *p) if (p->error_indicator) { return NULL; } - void 
*res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_53"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_53_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_53_type, _seq); + return _seq; } // _loop0_54: param_with_default @@ -13139,46 +13139,46 @@ _loop0_54_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + 
void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_54"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_54_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_54_type, _seq); + return _seq; } // _loop0_55: param_with_default @@ -13188,46 +13188,46 @@ _loop0_55_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = 
PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_55"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_55_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_55_type, _seq); + return _seq; } // _loop1_56: param_no_default @@ -13237,50 +13237,50 @@ _loop1_56_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return 
NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_56"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_56_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_56_type, _seq); + return _seq; } // _loop0_57: param_with_default @@ -13290,46 +13290,46 @@ _loop0_57_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); 
return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_57"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_57_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_57_type, _seq); + return _seq; } // _loop1_58: param_with_default @@ -13339,50 +13339,50 @@ _loop1_58_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 
0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_58"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_58_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_58_type, _seq); + return _seq; } // _loop1_59: param_no_default @@ -13392,50 +13392,50 @@ _loop1_59_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t 
_children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_59"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_59_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_59_type, _seq); + return _seq; } // _loop1_60: param_no_default @@ -13445,50 +13445,50 @@ _loop1_60_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + 
ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_60"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_60_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_60_type, _seq); + return _seq; } // _loop0_61: param_no_default @@ -13498,46 +13498,46 @@ _loop0_61_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; 
+ ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_61"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_61_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_61_type, _seq); + return _seq; } // _loop1_62: param_with_default @@ -13547,50 +13547,50 @@ _loop1_62_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( 
(param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_62"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_62_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_62_type, _seq); + return _seq; } // _loop0_63: param_no_default @@ -13600,46 +13600,46 @@ _loop0_63_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty 
param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_63"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_63_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_63_type, _seq); + return _seq; } // _loop1_64: param_with_default @@ -13649,50 +13649,50 @@ _loop1_64_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = 
param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_64"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_64_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_64_type, _seq); + return _seq; } // _loop0_65: param_maybe_default @@ -13702,46 +13702,46 @@ _loop0_65_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) 
// param_maybe_default ) { - res = param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_65"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_65_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_65_type, _seq); + return _seq; } // _loop1_66: param_maybe_default @@ -13751,50 +13751,50 @@ _loop1_66_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default ) { - res = param_maybe_default_var; - if (n == children_capacity) { - 
children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_66"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_66_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_66_type, _seq); + return _seq; } // _loop1_67: ('@' named_expression NEWLINE) @@ -13804,50 +13804,50 @@ _loop1_67_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('@' named_expression NEWLINE) void *_tmp_137_var; while ( (_tmp_137_var = _tmp_137_rule(p)) // '@' named_expression NEWLINE ) { - res = _tmp_137_var; - if (n == 
children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_137_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_67"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_67_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_67_type, _seq); + return _seq; } // _tmp_68: '(' arguments? ')' @@ -13857,32 +13857,32 @@ _tmp_68_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '(' arguments? ')' - Token * literal; - Token * literal_1; + Token * _literal; + Token * _literal_1; void *z; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && (z = arguments_rule(p), 1) // arguments? 
&& - (literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_70: ',' star_expression @@ -13892,54 +13892,54 @@ _loop0_70_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' star_expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = star_expression_rule(p)) // star_expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq 
*_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_70"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_70_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_70_type, _seq); + return _seq; } // _gather_69: star_expression _loop0_70 @@ -13949,8 +13949,8 @@ _gather_69_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // star_expression _loop0_70 expr_ty elem; asdl_seq * seq; @@ -13960,14 +13960,14 @@ _gather_69_rule(Parser *p) (seq = _loop0_70_rule(p)) // _loop0_70 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_71: (',' star_expression) @@ -13977,50 +13977,50 @@ _loop1_71_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (',' star_expression) void *_tmp_138_var; while ( (_tmp_138_var = _tmp_138_rule(p)) // ',' star_expression ) { - res = _tmp_138_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + 
_res = _tmp_138_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_71"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_71_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_71_type, _seq); + return _seq; } // _loop0_73: ',' star_named_expression @@ -14030,54 +14030,54 @@ _loop0_73_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' star_named_expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = star_named_expression_rule(p)) // star_named_expression ) { - res = elem; - if (res == NULL && 
PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_73"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_73_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_73_type, _seq); + return _seq; } // _gather_72: star_named_expression _loop0_73 @@ -14087,8 +14087,8 @@ _gather_72_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // star_named_expression _loop0_73 expr_ty elem; asdl_seq * seq; @@ -14098,14 +14098,14 @@ _gather_72_rule(Parser *p) (seq = _loop0_73_rule(p)) // _loop0_73 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_74: (',' expression) @@ -14115,50 +14115,50 @@ _loop1_74_rule(Parser *p) if (p->error_indicator) { return NULL; } - 
void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (',' expression) void *_tmp_139_var; while ( (_tmp_139_var = _tmp_139_rule(p)) // ',' expression ) { - res = _tmp_139_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_139_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_74"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_74_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_74_type, _seq); + return _seq; } // _loop0_75: lambda_param_no_default @@ -14168,46 +14168,46 @@ _loop0_75_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = 
p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_75"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_75_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_75_type, _seq); + return _seq; } // _loop0_76: lambda_param_with_default @@ -14217,46 +14217,46 @@ _loop0_76_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void 
*)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_76"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_76_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_76_type, _seq); + return _seq; } // _loop0_77: lambda_param_with_default @@ -14266,46 +14266,46 @@ _loop0_77_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark 
= p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_77"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_77_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_77_type, _seq); + return _seq; } // _loop1_78: lambda_param_no_default @@ -14315,50 +14315,50 @@ _loop1_78_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children 
= PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_78"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_78_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_78_type, _seq); + return _seq; } // _loop0_79: lambda_param_with_default @@ -14368,46 +14368,46 @@ _loop0_79_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark 
= p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_79"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_79_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_79_type, _seq); + return _seq; } // _loop1_80: lambda_param_with_default @@ -14417,50 +14417,50 @@ _loop1_80_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void 
**_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_80"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_80_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_80_type, _seq); + return _seq; } // _loop1_81: lambda_param_no_default @@ -14470,50 +14470,50 @@ _loop1_81_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) 
{ + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_81"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_81_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_81_type, _seq); + return _seq; } // _loop1_82: lambda_param_no_default @@ -14523,50 +14523,50 @@ _loop1_82_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void 
**children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_82"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_82_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_82_type, _seq); + return _seq; } // _loop0_83: lambda_param_no_default @@ -14576,46 +14576,46 @@ _loop0_83_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; 
- int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_83"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_83_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_83_type, _seq); + return _seq; } // _loop1_84: lambda_param_with_default @@ -14625,50 +14625,50 @@ _loop1_84_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = 
PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_84"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_84_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_84_type, _seq); + return _seq; } // _loop0_85: lambda_param_no_default @@ -14678,46 +14678,46 @@ _loop0_85_rule(Parser *p) if (p->error_indicator) { return NULL; } - void 
*res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_85"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_85_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_85_type, _seq); + return _seq; } // _loop1_86: lambda_param_with_default @@ -14727,50 +14727,50 @@ _loop1_86_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children 
= PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_86"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_86_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_86_type, _seq); + return _seq; } // _loop0_87: lambda_param_maybe_default @@ -14780,46 +14780,46 @@ _loop0_87_rule(Parser *p) if (p->error_indicator) { return NULL; } - 
void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { - res = lambda_param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_87"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_87_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_87_type, _seq); + return _seq; } // _loop1_88: lambda_param_maybe_default @@ -14829,50 +14829,50 @@ _loop1_88_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int 
start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { - res = lambda_param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_88"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_88_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_88_type, _seq); + return _seq; } // _loop1_89: ('or' conjunction) @@ -14882,50 +14882,50 @@ _loop1_89_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('or' conjunction) void *_tmp_140_var; while ( (_tmp_140_var = _tmp_140_rule(p)) // 'or' conjunction ) { - res = _tmp_140_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_140_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_89"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_89_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_89_type, _seq); + return _seq; } // _loop1_90: ('and' inversion) @@ -14935,50 +14935,50 @@ _loop1_90_rule(Parser *p) if (p->error_indicator) { return NULL; 
} - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('and' inversion) void *_tmp_141_var; while ( (_tmp_141_var = _tmp_141_rule(p)) // 'and' inversion ) { - res = _tmp_141_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_141_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_90"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_90_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_90_type, _seq); + return _seq; } // _loop1_91: compare_op_bitwise_or_pair @@ -14988,50 +14988,50 @@ _loop1_91_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int 
mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // compare_op_bitwise_or_pair CmpopExprPair* compare_op_bitwise_or_pair_var; while ( (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair ) { - res = compare_op_bitwise_or_pair_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = compare_op_bitwise_or_pair_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_91"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_91_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_91_type, _seq); + return _seq; } // _tmp_92: '!=' @@ -15041,26 +15041,26 @@ _tmp_92_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '!=' Token * tok; if ( (tok = _PyPegen_expect_token(p, 28)) // token='!=' ) { - res = _PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_94: ',' slice @@ -15070,54 +15070,54 @@ _loop0_94_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' slice + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = slice_rule(p)) // slice ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = 
_res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_94"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_94_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_94_type, _seq); + return _seq; } // _gather_93: slice _loop0_94 @@ -15127,8 +15127,8 @@ _gather_93_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // slice _loop0_94 expr_ty elem; asdl_seq * seq; @@ -15138,14 +15138,14 @@ _gather_93_rule(Parser *p) (seq = _loop0_94_rule(p)) // _loop0_94 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_95: ':' expression? @@ -15155,29 +15155,29 @@ _tmp_95_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ':' expression? + Token * _literal; void *d; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (d = expression_rule(p), 1) // expression? 
) { - res = d; - if (res == NULL && PyErr_Occurred()) { + _res = d; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_96: tuple | group | genexp @@ -15187,18 +15187,18 @@ _tmp_96_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // tuple expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple ) { - res = tuple_var; + _res = tuple_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // group expr_ty group_var; @@ -15206,10 +15206,10 @@ _tmp_96_rule(Parser *p) (group_var = group_rule(p)) // group ) { - res = group_var; + _res = group_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // genexp expr_ty genexp_var; @@ -15217,14 +15217,14 @@ _tmp_96_rule(Parser *p) (genexp_var = genexp_rule(p)) // genexp ) { - res = genexp_var; + _res = genexp_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_97: list | listcomp @@ -15234,18 +15234,18 @@ _tmp_97_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // list expr_ty list_var; if ( (list_var = list_rule(p)) // list ) { - res = list_var; + _res = list_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // listcomp expr_ty listcomp_var; @@ -15253,14 +15253,14 @@ _tmp_97_rule(Parser *p) (listcomp_var = listcomp_rule(p)) // listcomp ) { - res = listcomp_var; + _res = listcomp_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_98: dict | set | dictcomp | setcomp @@ -15270,18 +15270,18 @@ _tmp_98_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * 
_res = NULL; + int _mark = p->mark; { // dict expr_ty dict_var; if ( (dict_var = dict_rule(p)) // dict ) { - res = dict_var; + _res = dict_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // set expr_ty set_var; @@ -15289,10 +15289,10 @@ _tmp_98_rule(Parser *p) (set_var = set_rule(p)) // set ) { - res = set_var; + _res = set_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // dictcomp expr_ty dictcomp_var; @@ -15300,10 +15300,10 @@ _tmp_98_rule(Parser *p) (dictcomp_var = dictcomp_rule(p)) // dictcomp ) { - res = dictcomp_var; + _res = dictcomp_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // setcomp expr_ty setcomp_var; @@ -15311,14 +15311,14 @@ _tmp_98_rule(Parser *p) (setcomp_var = setcomp_rule(p)) // setcomp ) { - res = setcomp_var; + _res = setcomp_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_99: STRING @@ -15328,50 +15328,50 @@ _loop1_99_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // STRING expr_ty string_var; while ( (string_var = _PyPegen_string_token(p)) // STRING ) { - res = string_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = string_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - 
children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_99"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_99_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_99_type, _seq); + return _seq; } // _tmp_100: star_named_expression ',' star_named_expressions? @@ -15381,32 +15381,32 @@ _tmp_100_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // star_named_expression ',' star_named_expressions? - Token * literal; + Token * _literal; expr_ty y; void *z; if ( (y = star_named_expression_rule(p)) // star_named_expression && - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (z = star_named_expressions_rule(p), 1) // star_named_expressions? 
) { - res = _PyPegen_seq_insert_in_front ( p , y , z ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_insert_in_front ( p , y , z ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_101: yield_expr | named_expression @@ -15416,18 +15416,18 @@ _tmp_101_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // named_expression expr_ty named_expression_var; @@ -15435,14 +15435,14 @@ _tmp_101_rule(Parser *p) (named_expression_var = named_expression_rule(p)) // named_expression ) { - res = named_expression_var; + _res = named_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_103: ',' kvpair @@ -15452,54 +15452,54 @@ _loop0_103_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kvpair + Token * _literal; KeyValuePair* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kvpair_rule(p)) // kvpair ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { 
+ _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_103"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_103_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_103_type, _seq); + return _seq; } // _gather_102: kvpair _loop0_103 @@ -15509,8 +15509,8 @@ _gather_102_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kvpair _loop0_103 KeyValuePair* elem; asdl_seq * seq; @@ -15520,14 +15520,14 @@ _gather_102_rule(Parser *p) (seq = _loop0_103_rule(p)) // _loop0_103 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_104: for_if_clause @@ -15537,50 +15537,50 @@ _loop1_104_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = 
p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // for_if_clause comprehension_ty for_if_clause_var; while ( (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause ) { - res = for_if_clause_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = for_if_clause_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_104"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_104_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_104_type, _seq); + return _seq; } // _loop0_105: ('if' disjunction) @@ -15590,46 +15590,46 @@ _loop0_105_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = 
p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('if' disjunction) void *_tmp_142_var; while ( (_tmp_142_var = _tmp_142_rule(p)) // 'if' disjunction ) { - res = _tmp_142_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_142_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_105"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_105_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_105_type, _seq); + return _seq; } // _loop0_106: ('if' disjunction) @@ -15639,46 +15639,46 @@ _loop0_106_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = 
p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('if' disjunction) void *_tmp_143_var; while ( (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction ) { - res = _tmp_143_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_143_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_106"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_106_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_106_type, _seq); + return _seq; } // _tmp_107: ',' args @@ -15688,29 +15688,29 @@ _tmp_107_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' args + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = args_rule(p)) // args ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL 
&& PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_108: ',' args @@ -15720,29 +15720,29 @@ _tmp_108_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' args + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = args_rule(p)) // args ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_110: ',' kwarg_or_starred @@ -15752,54 +15752,54 @@ _loop0_110_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = 
PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_110"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_110_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_110_type, _seq); + return _seq; } // _gather_109: kwarg_or_starred _loop0_110 @@ -15809,8 +15809,8 @@ _gather_109_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_starred _loop0_110 KeywordOrStarred* elem; asdl_seq * seq; @@ -15820,14 +15820,14 @@ _gather_109_rule(Parser *p) (seq = _loop0_110_rule(p)) // _loop0_110 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_112: ',' kwarg_or_double_starred @@ -15837,54 +15837,54 @@ _loop0_112_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + 
void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_double_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_112"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_112_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_112_type, _seq); + return _seq; } // _gather_111: kwarg_or_double_starred _loop0_112 @@ -15894,8 +15894,8 @@ _gather_111_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int 
mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_double_starred _loop0_112 KeywordOrStarred* elem; asdl_seq * seq; @@ -15905,14 +15905,14 @@ _gather_111_rule(Parser *p) (seq = _loop0_112_rule(p)) // _loop0_112 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_114: ',' kwarg_or_starred @@ -15922,54 +15922,54 @@ _loop0_114_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = 
mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_114"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_114_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_114_type, _seq); + return _seq; } // _gather_113: kwarg_or_starred _loop0_114 @@ -15979,8 +15979,8 @@ _gather_113_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_starred _loop0_114 KeywordOrStarred* elem; asdl_seq * seq; @@ -15990,14 +15990,14 @@ _gather_113_rule(Parser *p) (seq = _loop0_114_rule(p)) // _loop0_114 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_116: ',' kwarg_or_double_starred @@ -16007,54 +16007,54 @@ _loop0_116_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_double_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' 
+ (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_116"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_116_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_116_type, _seq); + return _seq; } // _gather_115: kwarg_or_double_starred _loop0_116 @@ -16064,8 +16064,8 @@ _gather_115_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_double_starred _loop0_116 KeywordOrStarred* elem; asdl_seq * seq; @@ -16075,14 +16075,14 @@ _gather_115_rule(Parser *p) (seq = _loop0_116_rule(p)) // _loop0_116 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - 
res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_117: (',' star_target) @@ -16092,46 +16092,46 @@ _loop0_117_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (',' star_target) void *_tmp_144_var; while ( (_tmp_144_var = _tmp_144_rule(p)) // ',' star_target ) { - res = _tmp_144_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_144_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_117"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_117_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_117_type, _seq); + return _seq; } // _loop0_119: ',' star_target @@ -16141,54 +16141,54 @@ _loop0_119_rule(Parser *p) if (p->error_indicator) { 
return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' star_target + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = star_target_rule(p)) // star_target ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_119"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_119_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_119_type, _seq); + return 
_seq; } // _gather_118: star_target _loop0_119 @@ -16198,8 +16198,8 @@ _gather_118_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // star_target _loop0_119 expr_ty elem; asdl_seq * seq; @@ -16209,14 +16209,14 @@ _gather_118_rule(Parser *p) (seq = _loop0_119_rule(p)) // _loop0_119 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_120: !'*' star_target @@ -16226,8 +16226,8 @@ _tmp_120_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // !'*' star_target expr_ty star_target_var; if ( @@ -16236,14 +16236,14 @@ _tmp_120_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { - res = star_target_var; + _res = star_target_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_122: ',' del_target @@ -16253,54 +16253,54 @@ _loop0_122_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' del_target + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = del_target_rule(p)) // del_target ) { - res = elem; - if 
(res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_122"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_122_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_122_type, _seq); + return _seq; } // _gather_121: del_target _loop0_122 @@ -16310,8 +16310,8 @@ _gather_121_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // del_target _loop0_122 expr_ty elem; asdl_seq * seq; @@ -16321,14 +16321,14 @@ _gather_121_rule(Parser *p) (seq = _loop0_122_rule(p)) // _loop0_122 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_124: ',' target @@ -16338,54 +16338,54 @@ _loop0_124_rule(Parser *p) if (p->error_indicator) { return NULL; } - 
void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' target + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (elem = target_rule(p)) // target ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_124"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_124_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_124_type, _seq); + return _seq; } // _gather_123: target 
_loop0_124 @@ -16395,8 +16395,8 @@ _gather_123_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // target _loop0_124 expr_ty elem; asdl_seq * seq; @@ -16406,14 +16406,14 @@ _gather_123_rule(Parser *p) (seq = _loop0_124_rule(p)) // _loop0_124 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_125: args | expression for_if_clauses @@ -16423,18 +16423,18 @@ _tmp_125_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // args expr_ty args_var; if ( (args_var = args_rule(p)) // args ) { - res = args_var; + _res = args_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // expression for_if_clauses expr_ty expression_var; @@ -16445,14 +16445,14 @@ _tmp_125_rule(Parser *p) (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); + _res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_126: '=' annotated_rhs @@ -16462,25 +16462,25 @@ _tmp_126_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' annotated_rhs + Token * _literal; expr_ty annotated_rhs_var; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && (annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs ) { - res = _PyPegen_dummy_name(p, literal, annotated_rhs_var); + _res = _PyPegen_dummy_name(p, _literal, annotated_rhs_var); 
goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_127: '=' | augassign @@ -16490,18 +16490,18 @@ _tmp_127_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // augassign AugOperator* augassign_var; @@ -16509,14 +16509,14 @@ _tmp_127_rule(Parser *p) (augassign_var = augassign_rule(p)) // augassign ) { - res = augassign_var; + _res = augassign_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_128: yield_expr | star_expressions @@ -16526,18 +16526,18 @@ _tmp_128_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; @@ -16545,14 +16545,14 @@ _tmp_128_rule(Parser *p) (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_129: '[' | '(' | '{' @@ -16562,44 +16562,44 @@ _tmp_129_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '[' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 9)) // token='[' + 
(_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '{' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 25)) // token='{' + (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_130: param_no_default @@ -16609,46 +16609,46 @@ _loop0_130_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; 
} - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_130"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_130_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_130_type, _seq); + return _seq; } // _tmp_131: slash_with_default | param_with_default+ @@ -16658,18 +16658,18 @@ _tmp_131_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // slash_with_default SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - res = slash_with_default_var; + _res = slash_with_default_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // param_with_default+ asdl_seq * _loop1_145_var; @@ -16677,14 +16677,14 @@ _tmp_131_rule(Parser *p) (_loop1_145_var = _loop1_145_rule(p)) // param_with_default+ ) { - res = _loop1_145_var; + _res = _loop1_145_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_132: ')' | ',' (')' | '**') @@ -16694,36 +16694,36 @@ _tmp_132_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ')' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 8)) // token=')' + (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // ',' (')' | '**') + Token * _literal; void *_tmp_146_var; - Token * literal; if ( - (literal = 
_PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (_tmp_146_var = _tmp_146_rule(p)) // ')' | '**' ) { - res = _PyPegen_dummy_name(p, literal, _tmp_146_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_146_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_133: ':' | ',' (':' | '**') @@ -16733,36 +16733,36 @@ _tmp_133_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ':' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // ',' (':' | '**') + Token * _literal; void *_tmp_147_var; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (_tmp_147_var = _tmp_147_rule(p)) // ':' | '**' ) { - res = _PyPegen_dummy_name(p, literal, _tmp_147_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_147_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_134: star_targets '=' @@ -16772,29 +16772,29 @@ _tmp_134_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // star_targets '=' - Token * literal; + Token * _literal; expr_ty z; if ( (z = star_targets_rule(p)) // star_targets && - (literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = 
NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_135: '.' | '...' @@ -16804,33 +16804,33 @@ _tmp_135_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '.' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '...' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 52)) // token='...' + (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_136: '.' | '...' @@ -16840,33 +16840,33 @@ _tmp_136_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '.' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 23)) // token='.' + (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '...' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 52)) // token='...' + (_literal = _PyPegen_expect_token(p, 52)) // token='...' 
) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_137: '@' named_expression NEWLINE @@ -16876,32 +16876,32 @@ _tmp_137_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '@' named_expression NEWLINE + Token * _literal; expr_ty f; - Token * literal; Token * newline_var; if ( - (literal = _PyPegen_expect_token(p, 49)) // token='@' + (_literal = _PyPegen_expect_token(p, 49)) // token='@' && (f = named_expression_rule(p)) // named_expression && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = f; - if (res == NULL && PyErr_Occurred()) { + _res = f; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_138: ',' star_expression @@ -16911,29 +16911,29 @@ _tmp_138_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' star_expression + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = star_expression_rule(p)) // star_expression ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_139: ',' expression @@ -16943,29 +16943,29 @@ _tmp_139_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' expression + Token * _literal; expr_ty c; - Token * literal; 
if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = expression_rule(p)) // expression ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_140: 'or' conjunction @@ -16975,29 +16975,29 @@ _tmp_140_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'or' conjunction + Token * _keyword; expr_ty c; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 532)) // token='or' + (_keyword = _PyPegen_expect_token(p, 532)) // token='or' && (c = conjunction_rule(p)) // conjunction ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_141: 'and' inversion @@ -17007,29 +17007,29 @@ _tmp_141_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'and' inversion + Token * _keyword; expr_ty c; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 533)) // token='and' + (_keyword = _PyPegen_expect_token(p, 533)) // token='and' && (c = inversion_rule(p)) // inversion ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_142: 'if' disjunction @@ -17039,29 +17039,29 @@ _tmp_142_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - 
int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'if' disjunction - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_143: 'if' disjunction @@ -17071,29 +17071,29 @@ _tmp_143_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'if' disjunction - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) // token='if' + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_144: ',' star_target @@ -17103,29 +17103,29 @@ _tmp_144_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' star_target + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) // token=',' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (c = star_target_rule(p)) // star_target ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // 
_loop1_145: param_with_default @@ -17135,50 +17135,50 @@ _loop1_145_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0 || p->error_indicator) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_145"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_145_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, 
_loop1_145_type, _seq); + return _seq; } // _tmp_146: ')' | '**' @@ -17188,33 +17188,33 @@ _tmp_146_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ')' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 8)) // token=')' + (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_147: ':' | '**' @@ -17224,33 +17224,33 @@ _tmp_147_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ':' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 11)) // token=':' + (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 35)) // token='**' + (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } void * diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index ffb18e47e4a9a8..b55a652ac8060d 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -132,7 +132,7 @@ void *_PyPegen_dummy_name(Parser *p, ...); #define UNUSED(expr) do { (void)(expr); } while (0) #define EXTRA_EXPR(head, tail) head->lineno, head->col_offset, tail->end_lineno, tail->end_col_offset, 
p->arena -#define EXTRA start_lineno, start_col_offset, end_lineno, end_col_offset, p->arena +#define EXTRA _start_lineno, _start_col_offset, _end_lineno, _end_col_offset, p->arena #define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 1, msg, ##__VA_ARGS__) #define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, 1, msg, ##__VA_ARGS__) #define RAISE_SYNTAX_ERROR_NO_COL_OFFSET(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 0, msg, ##__VA_ARGS__) diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index b7a9942c2fdd27..6c9aa3f2ba7866 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -1,5 +1,5 @@ import ast -from dataclasses import dataclass, field +from dataclasses import dataclass import re from typing import Any, Dict, IO, Optional, List, Text, Tuple, Set from enum import Enum @@ -101,7 +101,7 @@ def keyword_helper(self, keyword: str) -> FunctionCall: if keyword not in self.keyword_cache: self.keyword_cache[keyword] = self.gen.keyword_type() return FunctionCall( - assigned_variable="keyword", + assigned_variable="_keyword", function="_PyPegen_expect_token", arguments=["p", self.keyword_cache[keyword]], return_type="Token *", @@ -140,7 +140,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type=type, - comment=f"{node}" + comment=f"{node}", ) def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: @@ -151,7 +151,7 @@ def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: assert val in self.exact_tokens, f"{node.value} is not a known literal" type = self.exact_tokens[val] return FunctionCall( - assigned_variable="literal", + assigned_variable="_literal", function=f"_PyPegen_expect_token", arguments=["p", type], nodetype=NodeTypes.GENERIC_TOKEN, @@ -175,8 +175,10 @@ def can_we_inline(node: Rhs) -> int: else: name = 
self.gen.name_node(node) self.cache[node] = FunctionCall( - assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], - comment=f"{node}" + assigned_variable=f"{name}_var", + function=f"{name}_rule", + arguments=["p"], + comment=f"{node}", ) return self.cache[node] @@ -217,11 +219,11 @@ def visit_NegativeLookahead(self, node: NegativeLookahead) -> FunctionCall: def visit_Opt(self, node: Opt) -> FunctionCall: call = self.visit(node.node) return FunctionCall( - assigned_variable="opt_var", + assigned_variable="_opt_var", function=call.function, arguments=call.arguments, force_true=True, - comment=f"{node}" + comment=f"{node}", ) def visit_Repeat0(self, node: Repeat0) -> FunctionCall: @@ -268,7 +270,7 @@ def visit_Group(self, node: Group) -> FunctionCall: def visit_Cut(self, node: Cut) -> FunctionCall: return FunctionCall( - assigned_variable="cut_var", + assigned_variable="_cut_var", return_type="int", function="1", nodetype=NodeTypes.CUT_OPERATOR, @@ -418,46 +420,46 @@ def _set_up_token_start_metadata_extraction(self) -> None: self.print("p->error_indicator = 1;") self.print("return NULL;") self.print("}") - self.print("int start_lineno = p->tokens[mark]->lineno;") - self.print("UNUSED(start_lineno); // Only used by EXTRA macro") - self.print("int start_col_offset = p->tokens[mark]->col_offset;") - self.print("UNUSED(start_col_offset); // Only used by EXTRA macro") + self.print("int _start_lineno = p->tokens[_mark]->lineno;") + self.print("UNUSED(_start_lineno); // Only used by EXTRA macro") + self.print("int _start_col_offset = p->tokens[_mark]->col_offset;") + self.print("UNUSED(_start_col_offset); // Only used by EXTRA macro") def _set_up_token_end_metadata_extraction(self) -> None: - self.print("Token *token = _PyPegen_get_last_nonnwhitespace_token(p);") - self.print("if (token == NULL) {") + self.print("Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);") + self.print("if (_token == NULL) {") with self.indent(): self.print("return 
NULL;") self.print("}") - self.print(f"int end_lineno = token->end_lineno;") - self.print("UNUSED(end_lineno); // Only used by EXTRA macro") - self.print(f"int end_col_offset = token->end_col_offset;") - self.print("UNUSED(end_col_offset); // Only used by EXTRA macro") + self.print("int _end_lineno = _token->end_lineno;") + self.print("UNUSED(_end_lineno); // Only used by EXTRA macro") + self.print("int _end_col_offset = _token->end_col_offset;") + self.print("UNUSED(_end_col_offset); // Only used by EXTRA macro") def _set_up_rule_memoization(self, node: Rule, result_type: str) -> None: self.print("{") with self.indent(): - self.print(f"{result_type} res = NULL;") - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &res))") + self.print(f"{result_type} _res = NULL;") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") with self.indent(): - self.print("return res;") - self.print("int mark = p->mark;") - self.print("int resmark = p->mark;") + self.print("return _res;") + self.print("int _mark = p->mark;") + self.print("int _resmark = p->mark;") self.print("while (1) {") with self.indent(): self.call_with_errorcheck_return( - f"_PyPegen_update_memo(p, mark, {node.name}_type, res)", "res" + f"_PyPegen_update_memo(p, _mark, {node.name}_type, _res)", "_res" ) - self.print("p->mark = mark;") - self.print(f"void *raw = {node.name}_raw(p);") - self.print("if (raw == NULL || p->mark <= resmark)") + self.print("p->mark = _mark;") + self.print(f"void *_raw = {node.name}_raw(p);") + self.print("if (_raw == NULL || p->mark <= _resmark)") with self.indent(): self.print("break;") - self.print("resmark = p->mark;") - self.print("res = raw;") + self.print(f"_resmark = p->mark;") + self.print("_res = _raw;") self.print("}") - self.print("p->mark = resmark;") - self.print("return res;") + self.print(f"p->mark = _resmark;") + self.print("return _res;") self.print("}") self.print(f"static {result_type}") self.print(f"{node.name}_raw(Parser *p)") @@ -473,12 
+475,12 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N with self.indent(): self.print("return NULL;") self.print("}") - self.print(f"{result_type} res = NULL;") + self.print(f"{result_type} _res = NULL;") if memoize: - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &res))") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") with self.indent(): - self.print("return res;") - self.print("int mark = p->mark;") + self.print("return _res;") + self.print("int _mark = p->mark;") if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): self._set_up_token_start_metadata_extraction() self.visit( @@ -488,13 +490,13 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N rulename=node.name if memoize else None, ) if self.debug: - self.print(f'fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') - self.print("res = NULL;") + self.print('fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') + self.print("_res = NULL;") self.print(" done:") with self.indent(): if memoize: - self.print(f"_PyPegen_insert_memo(p, mark, {node.name}_type, res);") - self.print("return res;") + self.print(f"_PyPegen_insert_memo(p, _mark, {node.name}_type, _res);") + self.print("return _res;") def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: memoize = self._should_memoize(node) @@ -505,17 +507,17 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: with self.indent(): self.print("return NULL;") self.print("}") - self.print(f"void *res = NULL;") + self.print("void *_res = NULL;") if memoize: - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &res))") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") with self.indent(): - self.print("return res;") - self.print("int mark = p->mark;") - self.print("int start_mark = p->mark;") - self.print("void **children = PyMem_Malloc(sizeof(void *));") - self.out_of_memory_return(f"!children", "NULL") 
- self.print("ssize_t children_capacity = 1;") - self.print("ssize_t n = 0;") + self.print("return _res;") + self.print("int _mark = p->mark;") + self.print("int _start_mark = p->mark;") + self.print("void **_children = PyMem_Malloc(sizeof(void *));") + self.out_of_memory_return(f"!_children", "NULL") + self.print("ssize_t _children_capacity = 1;") + self.print("ssize_t _n = 0;") if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): self._set_up_token_start_metadata_extraction() self.visit( @@ -525,23 +527,23 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: rulename=node.name if memoize else None, ) if is_repeat1: - self.print("if (n == 0 || p->error_indicator) {") + self.print("if (_n == 0 || p->error_indicator) {") with self.indent(): - self.print("PyMem_Free(children);") + self.print("PyMem_Free(_children);") self.print("return NULL;") self.print("}") - self.print("asdl_seq *seq = _Py_asdl_seq_new(n, p->arena);") + self.print("asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena);") self.out_of_memory_return( - f"!seq", + "!_seq", "NULL", message=f"asdl_seq_new {node.name}", - cleanup_code="PyMem_Free(children);", + cleanup_code="PyMem_Free(_children);", ) - self.print("for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]);") - self.print("PyMem_Free(children);") + self.print("for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]);") + self.print("PyMem_Free(_children);") if node.name: - self.print(f"_PyPegen_insert_memo(p, start_mark, {node.name}_type, seq);") - self.print("return seq;") + self.print(f"_PyPegen_insert_memo(p, _start_mark, {node.name}_type, _seq);") + self.print("return _seq;") def visit_Rule(self, node: Rule) -> None: is_loop = node.is_loop() @@ -599,9 +601,9 @@ def join_conditions(self, keyword: str, node: Any) -> None: self.print(")") def emit_action(self, node: Alt, cleanup_code: Optional[str] = None) -> None: - self.print(f"res = {node.action};") + self.print(f"_res = {node.action};") - 
self.print("if (res == NULL && PyErr_Occurred()) {") + self.print("if (_res == NULL && PyErr_Occurred()) {") with self.indent(): self.print("p->error_indicator = 1;") if cleanup_code: @@ -611,7 +613,7 @@ def emit_action(self, node: Alt, cleanup_code: Optional[str] = None) -> None: if self.debug: self.print( - f'fprintf(stderr, "Hit with action [%d-%d]: %s\\n", mark, p->mark, "{node}");' + f'fprintf(stderr, "Hit with action [%d-%d]: %s\\n", _mark, p->mark, "{node}");' ) def emit_default_action(self, is_gather: bool, node: Alt) -> None: @@ -619,7 +621,7 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: if is_gather: assert len(self.local_variable_names) == 2 self.print( - f"res = _PyPegen_seq_insert_in_front(p, " + f"_res = _PyPegen_seq_insert_in_front(p, " f"{self.local_variable_names[0]}, {self.local_variable_names[1]});" ) else: @@ -628,17 +630,17 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: f'fprintf(stderr, "Hit without action [%d:%d]: %s\\n", mark, p->mark, "{node}");' ) self.print( - f"res = _PyPegen_dummy_name(p, {', '.join(self.local_variable_names)});" + f"_res = _PyPegen_dummy_name(p, {', '.join(self.local_variable_names)});" ) else: if self.debug: self.print( f'fprintf(stderr, "Hit with default action [%d:%d]: %s\\n", mark, p->mark, "{node}");' ) - self.print(f"res = {self.local_variable_names[0]};") + self.print(f"_res = {self.local_variable_names[0]};") def emit_dummy_action(self) -> None: - self.print(f"res = _PyPegen_dummy_name(p);") + self.print("_res = _PyPegen_dummy_name(p);") def handle_alt_normal(self, node: Alt, is_gather: bool) -> None: self.join_conditions(keyword="if", node=node) @@ -671,20 +673,22 @@ def handle_alt_loop(self, node: Alt, is_gather: bool, rulename: Optional[str]) - if self.skip_actions: self.emit_dummy_action() elif node.action: - self.emit_action(node, cleanup_code="PyMem_Free(children);") + self.emit_action(node, cleanup_code="PyMem_Free(_children);") else: 
self.emit_default_action(is_gather, node) # Add the result of rule to the temporary buffer of children. This buffer # will populate later an asdl_seq with all elements to return. - self.print("if (n == children_capacity) {") + self.print("if (_n == _children_capacity) {") with self.indent(): - self.print("children_capacity *= 2;") - self.print("children = PyMem_Realloc(children, children_capacity*sizeof(void *));") - self.out_of_memory_return(f"!children", "NULL", message=f"realloc {rulename}") + self.print("_children_capacity *= 2;") + self.print( + "_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));" + ) + self.out_of_memory_return(f"!_children", "NULL", message=f"realloc {rulename}") self.print("}") - self.print(f"children[n++] = res;") - self.print("mark = p->mark;") + self.print("_children[_n++] = _res;") + self.print("_mark = p->mark;") self.print("}") def visit_Alt( @@ -699,11 +703,11 @@ def visit_Alt( var_type = "void *" else: var_type += " " - if v == "cut_var": + if v == "_cut_var": v += " = 0" # cut_var must be initialized self.print(f"{var_type}{v};") - if v == "opt_var": - self.print("UNUSED(opt_var); // Silence compiler warnings") + if v == "_opt_var": + self.print("UNUSED(_opt_var); // Silence compiler warnings") with self.local_variable_context(): if is_loop: @@ -711,9 +715,9 @@ def visit_Alt( else: self.handle_alt_normal(node, is_gather) - self.print("p->mark = mark;") - if "cut_var" in vars: - self.print("if (cut_var) return NULL;") + self.print("p->mark = _mark;") + if "_cut_var" in vars: + self.print("if (_cut_var) return NULL;") self.print("}") def collect_vars(self, node: Alt) -> Dict[Optional[str], Optional[str]]: diff --git a/Tools/peg_generator/pegen/parser_generator.py b/Tools/peg_generator/pegen/parser_generator.py index 03452510b9669b..689022b12da203 100644 --- a/Tools/peg_generator/pegen/parser_generator.py +++ b/Tools/peg_generator/pegen/parser_generator.py @@ -27,6 +27,11 @@ def visit_NameLeaf(self, node: 
NameLeaf) -> None: # TODO: Add line/col info to (leaf) nodes raise GrammarError(f"Dangling reference to rule {node.value!r}") + def visit_NamedItem(self, node: NameLeaf) -> None: + if node.name and node.name.startswith("_"): + raise GrammarError(f"Variable names cannot start with underscore: '{node.name}'") + self.visit(node.item) + class ParserGenerator: @@ -36,6 +41,7 @@ def __init__(self, grammar: Grammar, tokens: Dict[int, str], file: Optional[IO[T self.grammar = grammar self.tokens = tokens self.rules = grammar.rules + self.validate_rule_names() if "trailer" not in grammar.metas and "start" not in self.rules: raise GrammarError("Grammar without a trailer must have a 'start' rule") checker = RuleCheckingVisitor(self.rules, self.tokens) @@ -51,6 +57,11 @@ def __init__(self, grammar: Grammar, tokens: Dict[int, str], file: Optional[IO[T self.all_rules: Dict[str, Rule] = {} # Rules + temporal rules self._local_variable_stack: List[List[str]] = [] + def validate_rule_names(self): + for rule in self.rules: + if rule.startswith("_"): + raise GrammarError(f"Rule names cannot start with underscore: '{rule}'") + @contextlib.contextmanager def local_variable_context(self) -> Iterator[None]: self._local_variable_stack.append([]) From 85bdec1def789cdb60ab7ffe115e426267b00a60 Mon Sep 17 00:00:00 2001 From: Andre Delfino Date: Sun, 10 May 2020 02:15:54 -0300 Subject: [PATCH 052/115] Add link to Enum class (GH-19884) --- Doc/library/types.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 1d081e2c54868d..cdddb46783a470 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -379,7 +379,7 @@ Additional Utility Classes and Functions class's __getattr__ method; this is done by raising AttributeError. This allows one to have properties active on an instance, and have virtual - attributes on the class with the same name (see Enum for an example). 
+ attributes on the class with the same name (see :class:`enum.Enum` for an example). .. versionadded:: 3.4 From fcb285609a2e55f2dc63dcfbb32e4e2fddf71546 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sun, 10 May 2020 11:53:16 +0300 Subject: [PATCH 053/115] bpo-40397: Remove __args__ and __parameters__ from _SpecialGenericAlias (GH-19984) --- Lib/typing.py | 167 +++++++++--------- .../2020-05-07-21-22-04.bpo-40397.PVWFAn.rst | 2 + 2 files changed, 89 insertions(+), 80 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst diff --git a/Lib/typing.py b/Lib/typing.py index 681ab6d21e0a32..e31fc99e02245e 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -186,14 +186,13 @@ def _collect_type_vars(types): return tuple(tvars) -def _check_generic(cls, parameters): +def _check_generic(cls, parameters, elen): """Check correct count for parameters of a generic cls (internal helper). This gives a nice error message in case of count mismatch. """ - if not cls.__parameters__: + if not elen: raise TypeError(f"{cls} is not a generic class") alen = len(parameters) - elen = len(cls.__parameters__) if alen != elen: raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" f" actual {alen}, expected {elen}") @@ -592,17 +591,6 @@ def __reduce__(self): return self.__name__ -# Special typing constructs Union, Optional, Generic, Callable and Tuple -# use three special attributes for internal bookkeeping of generic types: -# * __parameters__ is a tuple of unique free type parameters of a generic -# type, for example, Dict[T, T].__parameters__ == (T,); -# * __origin__ keeps a reference to a type that was subscripted, -# e.g., Union[T, int].__origin__ == Union, or the non-generic version of -# the type. -# * __args__ is a tuple of all arguments used in subscripting, -# e.g., Dict[T, int].__args__ == (T, int). 
- - def _is_dunder(attr): return attr.startswith('__') and attr.endswith('__') @@ -615,28 +603,11 @@ class _BaseGenericAlias(_Final, _root=True): have 'name' always set. If 'inst' is False, then the alias can't be instantiated, this is used by e.g. typing.List and typing.Dict. """ - def __init__(self, origin, params, *, inst=True, name=None): + def __init__(self, origin, *, inst=True, name=None): self._inst = inst self._name = name - if not isinstance(params, tuple): - params = (params,) self.__origin__ = origin - self.__args__ = tuple(... if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in params) - self.__parameters__ = _collect_type_vars(params) self.__slots__ = None # This is not documented. - if not name: - self.__module__ = origin.__module__ - - def __eq__(self, other): - if not isinstance(other, _BaseGenericAlias): - return NotImplemented - return (self.__origin__ == other.__origin__ - and self.__args__ == other.__args__) - - def __hash__(self): - return hash((self.__origin__, self.__args__)) def __call__(self, *args, **kwargs): if not self._inst: @@ -669,7 +640,7 @@ def __getattr__(self, attr): raise AttributeError(attr) def __setattr__(self, attr, val): - if _is_dunder(attr) or attr in ('_name', '_inst'): + if _is_dunder(attr) or attr in ('_name', '_inst', '_nparams'): super().__setattr__(attr, val) else: setattr(self.__origin__, attr, val) @@ -682,7 +653,38 @@ def __subclasscheck__(self, cls): " class and instance checks") +# Special typing constructs Union, Optional, Generic, Callable and Tuple +# use three special attributes for internal bookkeeping of generic types: +# * __parameters__ is a tuple of unique free type parameters of a generic +# type, for example, Dict[T, T].__parameters__ == (T,); +# * __origin__ keeps a reference to a type that was subscripted, +# e.g., Union[T, int].__origin__ == Union, or the non-generic version of +# the type. 
+# * __args__ is a tuple of all arguments used in subscripting, +# e.g., Dict[T, int].__args__ == (T, int). + + class _GenericAlias(_BaseGenericAlias, _root=True): + def __init__(self, origin, params, *, inst=True, name=None): + super().__init__(origin, inst=inst, name=name) + if not isinstance(params, tuple): + params = (params,) + self.__args__ = tuple(... if a is _TypingEllipsis else + () if a is _TypingEmpty else + a for a in params) + self.__parameters__ = _collect_type_vars(params) + if not name: + self.__module__ = origin.__module__ + + def __eq__(self, other): + if not isinstance(other, _GenericAlias): + return NotImplemented + return (self.__origin__ == other.__origin__ + and self.__args__ == other.__args__) + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + @_tp_cache def __getitem__(self, params): if self.__origin__ in (Generic, Protocol): @@ -692,14 +694,14 @@ def __getitem__(self, params): params = (params,) msg = "Parameters to generic types must be types." params = tuple(_type_check(p, msg) for p in params) - _check_generic(self, params) + _check_generic(self, params, len(self.__parameters__)) subst = dict(zip(self.__parameters__, params)) new_args = [] for arg in self.__args__: if isinstance(arg, TypeVar): arg = subst[arg] - elif isinstance(arg, (_BaseGenericAlias, GenericAlias)): + elif isinstance(arg, (_GenericAlias, GenericAlias)): subargs = tuple(subst[x] for x in arg.__parameters__) arg = arg[subargs] new_args.append(arg) @@ -739,11 +741,16 @@ def __mro_entries__(self, bases): return (self.__origin__,) +# _nparams is the number of accepted parameters, e.g. 0 for Hashable, +# 1 for List and 2 for Dict. It may be -1 if variable number of +# parameters are accepted (needs custom __getitem__). 
+ class _SpecialGenericAlias(_BaseGenericAlias, _root=True): - def __init__(self, origin, params, *, inst=True, name=None): + def __init__(self, origin, nparams, *, inst=True, name=None): if name is None: name = origin.__name__ - super().__init__(origin, params, inst=inst, name=name) + super().__init__(origin, inst=inst, name=name) + self._nparams = nparams self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}' @_tp_cache @@ -752,8 +759,7 @@ def __getitem__(self, params): params = (params,) msg = "Parameters to generic types must be types." params = tuple(_type_check(p, msg) for p in params) - _check_generic(self, params) - assert self.__args__ == self.__parameters__ + _check_generic(self, params, self._nparams) return self.copy_with(params) def copy_with(self, params): @@ -912,7 +918,7 @@ def __class_getitem__(cls, params): f"Parameters to {cls.__name__}[...] must all be unique") else: # Subscripting a regular Generic subclass. - _check_generic(cls, params) + _check_generic(cls, params, len(cls.__parameters__)) return _GenericAlias(cls, params) def __init_subclass__(cls, *args, **kwargs): @@ -1571,18 +1577,18 @@ class Other(Leaf): # Error reported by type checker # Various ABCs mimicking those in collections.abc. _alias = _SpecialGenericAlias -Hashable = _alias(collections.abc.Hashable, ()) # Not generic. -Awaitable = _alias(collections.abc.Awaitable, T_co) -Coroutine = _alias(collections.abc.Coroutine, (T_co, T_contra, V_co)) -AsyncIterable = _alias(collections.abc.AsyncIterable, T_co) -AsyncIterator = _alias(collections.abc.AsyncIterator, T_co) -Iterable = _alias(collections.abc.Iterable, T_co) -Iterator = _alias(collections.abc.Iterator, T_co) -Reversible = _alias(collections.abc.Reversible, T_co) -Sized = _alias(collections.abc.Sized, ()) # Not generic. 
-Container = _alias(collections.abc.Container, T_co) -Collection = _alias(collections.abc.Collection, T_co) -Callable = _CallableType(collections.abc.Callable, ()) +Hashable = _alias(collections.abc.Hashable, 0) # Not generic. +Awaitable = _alias(collections.abc.Awaitable, 1) +Coroutine = _alias(collections.abc.Coroutine, 3) +AsyncIterable = _alias(collections.abc.AsyncIterable, 1) +AsyncIterator = _alias(collections.abc.AsyncIterator, 1) +Iterable = _alias(collections.abc.Iterable, 1) +Iterator = _alias(collections.abc.Iterator, 1) +Reversible = _alias(collections.abc.Reversible, 1) +Sized = _alias(collections.abc.Sized, 0) # Not generic. +Container = _alias(collections.abc.Container, 1) +Collection = _alias(collections.abc.Collection, 1) +Callable = _CallableType(collections.abc.Callable, 2) Callable.__doc__ = \ """Callable type; Callable[[int], str] is a function of (int) -> str. @@ -1593,15 +1599,16 @@ class Other(Leaf): # Error reported by type checker There is no syntax to indicate optional or keyword arguments, such function types are rarely used as callback types. """ -AbstractSet = _alias(collections.abc.Set, T_co, name='AbstractSet') -MutableSet = _alias(collections.abc.MutableSet, T) +AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet') +MutableSet = _alias(collections.abc.MutableSet, 1) # NOTE: Mapping is only covariant in the value type. 
-Mapping = _alias(collections.abc.Mapping, (KT, VT_co)) -MutableMapping = _alias(collections.abc.MutableMapping, (KT, VT)) -Sequence = _alias(collections.abc.Sequence, T_co) -MutableSequence = _alias(collections.abc.MutableSequence, T) -ByteString = _alias(collections.abc.ByteString, ()) # Not generic -Tuple = _TupleType(tuple, (), inst=False, name='Tuple') +Mapping = _alias(collections.abc.Mapping, 2) +MutableMapping = _alias(collections.abc.MutableMapping, 2) +Sequence = _alias(collections.abc.Sequence, 1) +MutableSequence = _alias(collections.abc.MutableSequence, 1) +ByteString = _alias(collections.abc.ByteString, 0) # Not generic +# Tuple accepts variable number of parameters. +Tuple = _TupleType(tuple, -1, inst=False, name='Tuple') Tuple.__doc__ = \ """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. @@ -1611,24 +1618,24 @@ class Other(Leaf): # Error reported by type checker To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. """ -List = _alias(list, T, inst=False, name='List') -Deque = _alias(collections.deque, T, name='Deque') -Set = _alias(set, T, inst=False, name='Set') -FrozenSet = _alias(frozenset, T_co, inst=False, name='FrozenSet') -MappingView = _alias(collections.abc.MappingView, T_co) -KeysView = _alias(collections.abc.KeysView, KT) -ItemsView = _alias(collections.abc.ItemsView, (KT, VT_co)) -ValuesView = _alias(collections.abc.ValuesView, VT_co) -ContextManager = _alias(contextlib.AbstractContextManager, T_co, name='ContextManager') -AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, T_co, name='AsyncContextManager') -Dict = _alias(dict, (KT, VT), inst=False, name='Dict') -DefaultDict = _alias(collections.defaultdict, (KT, VT), name='DefaultDict') -OrderedDict = _alias(collections.OrderedDict, (KT, VT)) -Counter = _alias(collections.Counter, T) -ChainMap = _alias(collections.ChainMap, (KT, VT)) -Generator = _alias(collections.abc.Generator, (T_co, T_contra, V_co)) -AsyncGenerator = 
_alias(collections.abc.AsyncGenerator, (T_co, T_contra)) -Type = _alias(type, CT_co, inst=False, name='Type') +List = _alias(list, 1, inst=False, name='List') +Deque = _alias(collections.deque, 1, name='Deque') +Set = _alias(set, 1, inst=False, name='Set') +FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet') +MappingView = _alias(collections.abc.MappingView, 1) +KeysView = _alias(collections.abc.KeysView, 1) +ItemsView = _alias(collections.abc.ItemsView, 2) +ValuesView = _alias(collections.abc.ValuesView, 1) +ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager') +AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager') +Dict = _alias(dict, 2, inst=False, name='Dict') +DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict') +OrderedDict = _alias(collections.OrderedDict, 2) +Counter = _alias(collections.Counter, 1) +ChainMap = _alias(collections.ChainMap, 2) +Generator = _alias(collections.abc.Generator, 3) +AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2) +Type = _alias(type, 1, inst=False, name='Type') Type.__doc__ = \ """A special construct usable to annotate class objects. @@ -2122,8 +2129,8 @@ class io: io.__name__ = __name__ + '.io' sys.modules[io.__name__] = io -Pattern = _alias(stdlib_re.Pattern, AnyStr) -Match = _alias(stdlib_re.Match, AnyStr) +Pattern = _alias(stdlib_re.Pattern, 1) +Match = _alias(stdlib_re.Match, 1) class re: """Wrapper namespace for re type aliases.""" diff --git a/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst b/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst new file mode 100644 index 00000000000000..46e806a2dc2222 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst @@ -0,0 +1,2 @@ +Removed attributes ``__args__`` and ``__parameters__`` from special generic +aliases like ``typing.List`` (not subscripted). 
From 1c2fa781560608aa4be50c748d4b3f403cfa5035 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Sun, 10 May 2020 11:05:29 +0200 Subject: [PATCH 054/115] bpo-40549: Convert posixmodule.c to multiphase init (GH-19982) Convert posixmodule.c ("posix" or "nt" module) to the multiphase initialization (PEP 489). * Create the module using PyModuleDef_Init(). * Create ScandirIteratorType and DirEntryType with the new PyType_FromModuleAndSpec() (PEP 573) * Get the module state from ScandirIteratorType and DirEntryType with the new PyType_GetModule() (PEP 573) * Pass module to functions which access the module state. * convert_sched_param() gets a new module parameter. It is now called directly since Argument Clinic doesn't support passing the module to an argument converter callback. * Remove _posixstate_global macro. --- .../2020-05-07-20-11-51.bpo-40549.6FiRSV.rst | 2 + Modules/clinic/posixmodule.c.h | 23 +- Modules/posixmodule.c | 251 ++++++++++-------- 3 files changed, 147 insertions(+), 129 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst diff --git a/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst b/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst new file mode 100644 index 00000000000000..873ff49c1eb00b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst @@ -0,0 +1,2 @@ +Convert posixmodule.c ("posix" or "nt" module) to the multiphase +initialization (PEP 489). 
diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index a2b4566443b517..cf6d7449bac832 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -2886,7 +2886,7 @@ PyDoc_STRVAR(os_sched_setscheduler__doc__, static PyObject * os_sched_setscheduler_impl(PyObject *module, pid_t pid, int policy, - struct sched_param *param); + PyObject *param_obj); static PyObject * os_sched_setscheduler(PyObject *module, PyObject *const *args, Py_ssize_t nargs) @@ -2894,13 +2894,13 @@ os_sched_setscheduler(PyObject *module, PyObject *const *args, Py_ssize_t nargs) PyObject *return_value = NULL; pid_t pid; int policy; - struct sched_param param; + PyObject *param_obj; - if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "iO&:sched_setscheduler", - &pid, &policy, convert_sched_param, ¶m)) { + if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "iO:sched_setscheduler", + &pid, &policy, ¶m_obj)) { goto exit; } - return_value = os_sched_setscheduler_impl(module, pid, policy, ¶m); + return_value = os_sched_setscheduler_impl(module, pid, policy, param_obj); exit: return return_value; @@ -2957,21 +2957,20 @@ PyDoc_STRVAR(os_sched_setparam__doc__, {"sched_setparam", (PyCFunction)(void(*)(void))os_sched_setparam, METH_FASTCALL, os_sched_setparam__doc__}, static PyObject * -os_sched_setparam_impl(PyObject *module, pid_t pid, - struct sched_param *param); +os_sched_setparam_impl(PyObject *module, pid_t pid, PyObject *param_obj); static PyObject * os_sched_setparam(PyObject *module, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; pid_t pid; - struct sched_param param; + PyObject *param_obj; - if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "O&:sched_setparam", - &pid, convert_sched_param, ¶m)) { + if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "O:sched_setparam", + &pid, ¶m_obj)) { goto exit; } - return_value = os_sched_setparam_impl(module, pid, ¶m); + return_value = os_sched_setparam_impl(module, pid, 
param_obj); exit: return return_value; @@ -9418,4 +9417,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=ba73b68f1c435ff6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=be90d3aba972098b input=a9049054013a1b77]*/ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 0163b0757aefa5..60a60e9aed76b6 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -834,7 +834,6 @@ typedef struct { PyObject *st_mode; } _posixstate; -static struct PyModuleDef posixmodule; static inline _posixstate* get_posix_state(PyObject *module) @@ -844,8 +843,6 @@ get_posix_state(PyObject *module) return (_posixstate *)state; } -#define _posixstate_global ((_posixstate *)PyModule_GetState(PyState_FindModule(&posixmodule))) - /* * A PyArg_ParseTuple "converter" function * that handles filesystem paths in the manner @@ -2156,7 +2153,7 @@ _posix_free(void *module) } static void -fill_time(PyObject *v, int index, time_t sec, unsigned long nsec) +fill_time(PyObject *module, PyObject *v, int index, time_t sec, unsigned long nsec) { PyObject *s = _PyLong_FromTime_t(sec); PyObject *ns_fractional = PyLong_FromUnsignedLong(nsec); @@ -2167,7 +2164,7 @@ fill_time(PyObject *v, int index, time_t sec, unsigned long nsec) if (!(s && ns_fractional)) goto exit; - s_in_ns = PyNumber_Multiply(s, _posixstate_global->billion); + s_in_ns = PyNumber_Multiply(s, get_posix_state(module)->billion); if (!s_in_ns) goto exit; @@ -2197,10 +2194,10 @@ fill_time(PyObject *v, int index, time_t sec, unsigned long nsec) /* pack a system stat C structure into the Python stat tuple (used by posix_stat() and posix_fstat()) */ static PyObject* -_pystat_fromstructstat(STRUCT_STAT *st) +_pystat_fromstructstat(PyObject *module, STRUCT_STAT *st) { unsigned long ansec, mnsec, cnsec; 
- PyObject *StatResultType = _posixstate_global->StatResultType; + PyObject *StatResultType = get_posix_state(module)->StatResultType; PyObject *v = PyStructSequence_New((PyTypeObject *)StatResultType); if (v == NULL) return NULL; @@ -2239,9 +2236,9 @@ _pystat_fromstructstat(STRUCT_STAT *st) #else ansec = mnsec = cnsec = 0; #endif - fill_time(v, 7, st->st_atime, ansec); - fill_time(v, 8, st->st_mtime, mnsec); - fill_time(v, 9, st->st_ctime, cnsec); + fill_time(module, v, 7, st->st_atime, ansec); + fill_time(module, v, 8, st->st_mtime, mnsec); + fill_time(module, v, 9, st->st_ctime, cnsec); #ifdef HAVE_STRUCT_STAT_ST_BLKSIZE PyStructSequence_SET_ITEM(v, ST_BLKSIZE_IDX, @@ -2303,7 +2300,7 @@ _pystat_fromstructstat(STRUCT_STAT *st) static PyObject * -posix_do_stat(const char *function_name, path_t *path, +posix_do_stat(PyObject *module, const char *function_name, path_t *path, int dir_fd, int follow_symlinks) { STRUCT_STAT st; @@ -2348,7 +2345,7 @@ posix_do_stat(const char *function_name, path_t *path, return path_error(path); } - return _pystat_fromstructstat(&st); + return _pystat_fromstructstat(module, &st); } /*[python input] @@ -2643,13 +2640,8 @@ class confstr_confname_converter(path_confname_converter): class sysconf_confname_converter(path_confname_converter): converter="conv_sysconf_confname" -class sched_param_converter(CConverter): - type = 'struct sched_param' - converter = 'convert_sched_param' - impl_by_reference = True; - [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=418fce0e01144461]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=f1c8ae8d744f6c8b]*/ /*[clinic input] @@ -2686,7 +2678,7 @@ static PyObject * os_stat_impl(PyObject *module, path_t *path, int dir_fd, int follow_symlinks) /*[clinic end generated code: output=7d4976e6f18a59c5 input=01d362ebcc06996b]*/ { - return posix_do_stat("stat", path, dir_fd, follow_symlinks); + return posix_do_stat(module, "stat", path, dir_fd, 
follow_symlinks); } @@ -2710,7 +2702,7 @@ os_lstat_impl(PyObject *module, path_t *path, int dir_fd) /*[clinic end generated code: output=ef82a5d35ce8ab37 input=0b7474765927b925]*/ { int follow_symlinks = 0; - return posix_do_stat("lstat", path, dir_fd, follow_symlinks); + return posix_do_stat(module, "lstat", path, dir_fd, follow_symlinks); } @@ -4852,11 +4844,11 @@ utime_default(utime_t *ut, const char *path) #endif static int -split_py_long_to_s_and_ns(PyObject *py_long, time_t *s, long *ns) +split_py_long_to_s_and_ns(PyObject *module, PyObject *py_long, time_t *s, long *ns) { int result = 0; PyObject *divmod; - divmod = PyNumber_Divmod(py_long, _posixstate_global->billion); + divmod = PyNumber_Divmod(py_long, get_posix_state(module)->billion); if (!divmod) goto exit; if (!PyTuple_Check(divmod) || PyTuple_GET_SIZE(divmod) != 2) { @@ -4968,9 +4960,9 @@ os_utime_impl(PyObject *module, path_t *path, PyObject *times, PyObject *ns, return NULL; } utime.now = 0; - if (!split_py_long_to_s_and_ns(PyTuple_GET_ITEM(ns, 0), + if (!split_py_long_to_s_and_ns(module, PyTuple_GET_ITEM(ns, 0), &utime.atime_s, &utime.atime_ns) || - !split_py_long_to_s_and_ns(PyTuple_GET_ITEM(ns, 1), + !split_py_long_to_s_and_ns(module, PyTuple_GET_ITEM(ns, 1), &utime.mtime_s, &utime.mtime_ns)) { return NULL; } @@ -5421,11 +5413,11 @@ enum posix_spawn_file_actions_identifier { #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) static int -convert_sched_param(PyObject *param, struct sched_param *res); +convert_sched_param(PyObject *module, PyObject *param, struct sched_param *res); #endif static int -parse_posix_spawn_flags(const char *func_name, PyObject *setpgroup, +parse_posix_spawn_flags(PyObject *module, const char *func_name, PyObject *setpgroup, int resetids, int setsid, PyObject *setsigmask, PyObject *setsigdef, PyObject *scheduler, posix_spawnattr_t *attrp) @@ -5495,11 +5487,15 @@ 
parse_posix_spawn_flags(const char *func_name, PyObject *setpgroup, if (scheduler) { #ifdef POSIX_SPAWN_SETSCHEDULER PyObject *py_schedpolicy; + PyObject *schedparam_obj; struct sched_param schedparam; - if (!PyArg_ParseTuple(scheduler, "OO&" + if (!PyArg_ParseTuple(scheduler, "OO" ";A scheduler tuple must have two elements", - &py_schedpolicy, convert_sched_param, &schedparam)) { + &py_schedpolicy, &schedparam_obj)) { + goto fail; + } + if (!convert_sched_param(module, schedparam_obj, &schedparam)) { goto fail; } if (py_schedpolicy != Py_None) { @@ -5728,7 +5724,7 @@ py_posix_spawn(int use_posix_spawnp, PyObject *module, path_t *path, PyObject *a file_actionsp = &file_actions_buf; } - if (parse_posix_spawn_flags(func_name, setpgroup, resetids, setsid, + if (parse_posix_spawn_flags(module, func_name, setpgroup, resetids, setsid, setsigmask, setsigdef, scheduler, &attr)) { goto exit; } @@ -6378,11 +6374,11 @@ static PyStructSequence_Desc sched_param_desc = { }; static int -convert_sched_param(PyObject *param, struct sched_param *res) +convert_sched_param(PyObject *module, PyObject *param, struct sched_param *res) { long priority; - if (!Py_IS_TYPE(param, (PyTypeObject *)_posixstate_global->SchedParamType)) { + if (!Py_IS_TYPE(param, (PyTypeObject *)get_posix_state(module)->SchedParamType)) { PyErr_SetString(PyExc_TypeError, "must have a sched_param object"); return 0; } @@ -6405,7 +6401,7 @@ os.sched_setscheduler pid: pid_t policy: int - param: sched_param + param as param_obj: object / Set the scheduling policy for the process identified by pid. @@ -6416,15 +6412,20 @@ param is an instance of sched_param. 
static PyObject * os_sched_setscheduler_impl(PyObject *module, pid_t pid, int policy, - struct sched_param *param) -/*[clinic end generated code: output=b0ac0a70d3b1d705 input=c581f9469a5327dd]*/ + PyObject *param_obj) +/*[clinic end generated code: output=cde27faa55dc993e input=73013d731bd8fbe9]*/ { + struct sched_param param; + if (!convert_sched_param(module, param_obj, ¶m)) { + return NULL; + } + /* ** sched_setscheduler() returns 0 in Linux, but the previous ** scheduling policy under Solaris/Illumos, and others. ** On error, -1 is returned in all Operating Systems. */ - if (sched_setscheduler(pid, policy, param) == -1) + if (sched_setscheduler(pid, policy, ¶m) == -1) return posix_error(); Py_RETURN_NONE; } @@ -6453,7 +6454,7 @@ os_sched_getparam_impl(PyObject *module, pid_t pid) if (sched_getparam(pid, ¶m)) return posix_error(); - PyObject *SchedParamType = _posixstate_global->SchedParamType; + PyObject *SchedParamType = get_posix_state(module)->SchedParamType; result = PyStructSequence_New((PyTypeObject *)SchedParamType); if (!result) return NULL; @@ -6470,7 +6471,7 @@ os_sched_getparam_impl(PyObject *module, pid_t pid) /*[clinic input] os.sched_setparam pid: pid_t - param: sched_param + param as param_obj: object / Set scheduling parameters for the process identified by pid. @@ -6480,11 +6481,15 @@ param should be an instance of sched_param. 
[clinic start generated code]*/ static PyObject * -os_sched_setparam_impl(PyObject *module, pid_t pid, - struct sched_param *param) -/*[clinic end generated code: output=8af013f78a32b591 input=6b8d6dfcecdc21bd]*/ +os_sched_setparam_impl(PyObject *module, pid_t pid, PyObject *param_obj) +/*[clinic end generated code: output=f19fe020a53741c1 input=27b98337c8b2dcc7]*/ { - if (sched_setparam(pid, param)) + struct sched_param param; + if (!convert_sched_param(module, param_obj, ¶m)) { + return NULL; + } + + if (sched_setparam(pid, ¶m)) return posix_error(); Py_RETURN_NONE; } @@ -7710,7 +7715,7 @@ os_setgroups(PyObject *module, PyObject *groups) #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) static PyObject * -wait_helper(pid_t pid, int status, struct rusage *ru) +wait_helper(PyObject *module, pid_t pid, int status, struct rusage *ru) { PyObject *result; PyObject *struct_rusage; @@ -7727,7 +7732,7 @@ wait_helper(pid_t pid, int status, struct rusage *ru) PyObject *m = PyImport_ImportModuleNoBlock("resource"); if (m == NULL) return NULL; - struct_rusage = PyObject_GetAttr(m, _posixstate_global->struct_rusage); + struct_rusage = PyObject_GetAttr(m, get_posix_state(module)->struct_rusage); Py_DECREF(m); if (struct_rusage == NULL) return NULL; @@ -7803,7 +7808,7 @@ os_wait3_impl(PyObject *module, int options) if (pid < 0) return (!async_err) ? posix_error() : NULL; - return wait_helper(pid, WAIT_STATUS_INT(status), &ru); + return wait_helper(module, pid, WAIT_STATUS_INT(status), &ru); } #endif /* HAVE_WAIT3 */ @@ -7840,7 +7845,7 @@ os_wait4_impl(PyObject *module, pid_t pid, int options) if (res < 0) return (!async_err) ? 
posix_error() : NULL; - return wait_helper(res, WAIT_STATUS_INT(status), &ru); + return wait_helper(module, res, WAIT_STATUS_INT(status), &ru); } #endif /* HAVE_WAIT4 */ @@ -8375,11 +8380,11 @@ static PyStructSequence_Desc times_result_desc = { #ifdef HAVE_TIMES static PyObject * -build_times_result(double user, double system, +build_times_result(PyObject *module, double user, double system, double children_user, double children_system, double elapsed) { - PyObject *TimesResultType = _posixstate_global->TimesResultType; + PyObject *TimesResultType = get_posix_state(module)->TimesResultType; PyObject *value = PyStructSequence_New((PyTypeObject *)TimesResultType); if (value == NULL) return NULL; @@ -8435,7 +8440,7 @@ os_times_impl(PyObject *module) 1e7 is one second in such units; 1e-7 the inverse. 429.4967296 is 2**32 / 1e7 or 2**32 * 1e-7. */ - return build_times_result( + return build_times_result(module, (double)(user.dwHighDateTime*429.4967296 + user.dwLowDateTime*1e-7), (double)(kernel.dwHighDateTime*429.4967296 + @@ -8454,7 +8459,7 @@ os_times_impl(PyObject *module) c = times(&t); if (c == (clock_t) -1) return posix_error(); - return build_times_result( + return build_times_result(module, (double)t.tms_utime / ticks_per_second, (double)t.tms_stime / ticks_per_second, (double)t.tms_cutime / ticks_per_second, @@ -9515,7 +9520,7 @@ os_fstat_impl(PyObject *module, int fd) #endif } - return _pystat_fromstructstat(&st); + return _pystat_fromstructstat(module, &st); } @@ -10601,8 +10606,8 @@ os_WSTOPSIG_impl(PyObject *module, int status) #include static PyObject* -_pystatvfs_fromstructstatvfs(struct statvfs st) { - PyObject *StatVFSResultType = _posixstate_global->StatVFSResultType; +_pystatvfs_fromstructstatvfs(PyObject *module, struct statvfs st) { + PyObject *StatVFSResultType = get_posix_state(module)->StatVFSResultType; PyObject *v = PyStructSequence_New((PyTypeObject *)StatVFSResultType); if (v == NULL) return NULL; @@ -10679,7 +10684,7 @@ 
os_fstatvfs_impl(PyObject *module, int fd) if (result != 0) return (!async_err) ? posix_error() : NULL; - return _pystatvfs_fromstructstatvfs(st); + return _pystatvfs_fromstructstatvfs(module, st); } #endif /* defined(HAVE_FSTATVFS) && defined(HAVE_SYS_STATVFS_H) */ @@ -10726,7 +10731,7 @@ os_statvfs_impl(PyObject *module, path_t *path) return path_error(path); } - return _pystatvfs_fromstructstatvfs(st); + return _pystatvfs_fromstructstatvfs(module, st); } #endif /* defined(HAVE_STATVFS) && defined(HAVE_SYS_STATVFS_H) */ @@ -12768,6 +12773,12 @@ os_DirEntry_is_symlink_impl(DirEntry *self) #endif } +static inline PyObject* +DirEntry_get_module(DirEntry *self) +{ + return PyType_GetModule(Py_TYPE(self)); +} + static PyObject * DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) { @@ -12805,7 +12816,7 @@ DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) if (result != 0) return path_object_error(self->path); - return _pystat_fromstructstat(&st); + return _pystat_fromstructstat(DirEntry_get_module(self), &st); } static PyObject * @@ -12813,7 +12824,8 @@ DirEntry_get_lstat(DirEntry *self) { if (!self->lstat) { #ifdef MS_WINDOWS - self->lstat = _pystat_fromstructstat(&self->win32_lstat); + self->lstat = _pystat_fromstructstat(DirEntry_get_module(self), + &self->win32_lstat); #else /* POSIX */ self->lstat = DirEntry_fetch_stat(self, 0); #endif @@ -12888,7 +12900,7 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits } goto error; } - st_mode = PyObject_GetAttr(stat, _posixstate_global->st_mode); + st_mode = PyObject_GetAttr(stat, get_posix_state(DirEntry_get_module(self))->st_mode); if (!st_mode) goto error; @@ -13092,14 +13104,14 @@ join_path_filenameW(const wchar_t *path_wide, const wchar_t *filename) } static PyObject * -DirEntry_from_find_data(path_t *path, WIN32_FIND_DATAW *dataW) +DirEntry_from_find_data(PyObject *module, path_t *path, WIN32_FIND_DATAW *dataW) { DirEntry *entry; BY_HANDLE_FILE_INFORMATION file_info; ULONG 
reparse_tag; wchar_t *joined_path; - PyObject *DirEntryType = _posixstate_global->DirEntryType; + PyObject *DirEntryType = get_posix_state(module)->DirEntryType; entry = PyObject_New(DirEntry, (PyTypeObject *)DirEntryType); if (!entry) return NULL; @@ -13177,8 +13189,8 @@ join_path_filename(const char *path_narrow, const char* filename, Py_ssize_t fil } static PyObject * -DirEntry_from_posix_info(path_t *path, const char *name, Py_ssize_t name_len, - ino_t d_ino +DirEntry_from_posix_info(PyObject *module, path_t *path, const char *name, + Py_ssize_t name_len, ino_t d_ino #ifdef HAVE_DIRENT_D_TYPE , unsigned char d_type #endif @@ -13187,7 +13199,7 @@ DirEntry_from_posix_info(path_t *path, const char *name, Py_ssize_t name_len, DirEntry *entry; char *joined_path; - PyObject *DirEntryType = _posixstate_global->DirEntryType; + PyObject *DirEntryType = get_posix_state(module)->DirEntryType; entry = PyObject_New(DirEntry, (PyTypeObject *)DirEntryType); if (!entry) return NULL; @@ -13307,8 +13319,10 @@ ScandirIterator_iternext(ScandirIterator *iterator) /* Skip over . and .. */ if (wcscmp(file_data->cFileName, L".") != 0 && - wcscmp(file_data->cFileName, L"..") != 0) { - entry = DirEntry_from_find_data(&iterator->path, file_data); + wcscmp(file_data->cFileName, L"..") != 0) + { + PyObject *module = PyType_GetModule(Py_TYPE(iterator)); + entry = DirEntry_from_find_data(module, &iterator->path, file_data); if (!entry) break; return entry; @@ -13379,10 +13393,12 @@ ScandirIterator_iternext(ScandirIterator *iterator) is_dot = direntp->d_name[0] == '.' && (name_len == 1 || (direntp->d_name[1] == '.' 
&& name_len == 2)); if (!is_dot) { - entry = DirEntry_from_posix_info(&iterator->path, direntp->d_name, - name_len, direntp->d_ino + PyObject *module = PyType_GetModule(Py_TYPE(iterator)); + entry = DirEntry_from_posix_info(module, + &iterator->path, direntp->d_name, + name_len, direntp->d_ino #ifdef HAVE_DIRENT_D_TYPE - , direntp->d_type + , direntp->d_type #endif ); if (!entry) @@ -14632,19 +14648,6 @@ all_ins(PyObject *m) } -static struct PyModuleDef posixmodule = { - PyModuleDef_HEAD_INIT, - MODNAME, - posix__doc__, - sizeof(_posixstate), - posix_methods, - NULL, - _posix_traverse, - _posix_clear, - _posix_free, -}; - - static const char * const have_functions[] = { #ifdef HAVE_FACCESSAT @@ -14779,35 +14782,25 @@ static const char * const have_functions[] = { }; -PyMODINIT_FUNC -INITFUNC(void) +static int +posixmodule_exec(PyObject *m) { - PyObject *m, *v; + PyObject *v; PyObject *list; const char * const *trace; - m = PyState_FindModule(&posixmodule); - if (m != NULL) { - Py_INCREF(m); - return m; - } - - m = PyModule_Create(&posixmodule); - if (m == NULL) - return NULL; - /* Initialize environ dictionary */ v = convertenviron(); Py_XINCREF(v); if (v == NULL || PyModule_AddObject(m, "environ", v) != 0) - return NULL; + return -1; Py_DECREF(v); if (all_ins(m)) - return NULL; + return -1; if (setup_confname_tables(m)) - return NULL; + return -1; Py_INCREF(PyExc_OSError); PyModule_AddObject(m, "error", PyExc_OSError); @@ -14816,7 +14809,7 @@ INITFUNC(void) waitid_result_desc.name = MODNAME ".waitid_result"; PyObject *WaitidResultType = (PyObject *)PyStructSequence_NewType(&waitid_result_desc); if (WaitidResultType == NULL) { - return NULL; + return -1; } Py_INCREF(WaitidResultType); PyModule_AddObject(m, "waitid_result", WaitidResultType); @@ -14829,7 +14822,7 @@ INITFUNC(void) stat_result_desc.fields[9].name = PyStructSequence_UnnamedField; PyObject *StatResultType = (PyObject *)PyStructSequence_NewType(&stat_result_desc); if (StatResultType == NULL) { - return 
NULL; + return -1; } Py_INCREF(StatResultType); PyModule_AddObject(m, "stat_result", StatResultType); @@ -14840,7 +14833,7 @@ INITFUNC(void) statvfs_result_desc.name = "os.statvfs_result"; /* see issue #19209 */ PyObject *StatVFSResultType = (PyObject *)PyStructSequence_NewType(&statvfs_result_desc); if (StatVFSResultType == NULL) { - return NULL; + return -1; } Py_INCREF(StatVFSResultType); PyModule_AddObject(m, "statvfs_result", StatVFSResultType); @@ -14859,7 +14852,7 @@ INITFUNC(void) sched_param_desc.name = MODNAME ".sched_param"; PyObject *SchedParamType = (PyObject *)PyStructSequence_NewType(&sched_param_desc); if (SchedParamType == NULL) { - return NULL; + return -1; } Py_INCREF(SchedParamType); PyModule_AddObject(m, "sched_param", SchedParamType); @@ -14870,22 +14863,22 @@ INITFUNC(void) /* initialize TerminalSize_info */ PyObject *TerminalSizeType = (PyObject *)PyStructSequence_NewType(&TerminalSize_desc); if (TerminalSizeType == NULL) { - return NULL; + return -1; } Py_INCREF(TerminalSizeType); PyModule_AddObject(m, "terminal_size", TerminalSizeType); get_posix_state(m)->TerminalSizeType = TerminalSizeType; /* initialize scandir types */ - PyObject *ScandirIteratorType = PyType_FromSpec(&ScandirIteratorType_spec); + PyObject *ScandirIteratorType = PyType_FromModuleAndSpec(m, &ScandirIteratorType_spec, NULL); if (ScandirIteratorType == NULL) { - return NULL; + return -1; } get_posix_state(m)->ScandirIteratorType = ScandirIteratorType; - PyObject *DirEntryType = PyType_FromSpec(&DirEntryType_spec); + PyObject *DirEntryType = PyType_FromModuleAndSpec(m, &DirEntryType_spec, NULL); if (DirEntryType == NULL) { - return NULL; + return -1; } Py_INCREF(DirEntryType); PyModule_AddObject(m, "DirEntry", DirEntryType); @@ -14894,7 +14887,7 @@ INITFUNC(void) times_result_desc.name = MODNAME ".times_result"; PyObject *TimesResultType = (PyObject *)PyStructSequence_NewType(×_result_desc); if (TimesResultType == NULL) { - return NULL; + return -1; } 
Py_INCREF(TimesResultType); PyModule_AddObject(m, "times_result", TimesResultType); @@ -14902,7 +14895,7 @@ INITFUNC(void) PyTypeObject *UnameResultType = PyStructSequence_NewType(&uname_result_desc); if (UnameResultType == NULL) { - return NULL; + return -1; } Py_INCREF(UnameResultType); PyModule_AddObject(m, "uname_result", (PyObject *)UnameResultType); @@ -14922,7 +14915,7 @@ INITFUNC(void) #ifdef HAVE_FSTATVFS if (fstatvfs == NULL) { if (PyObject_DelAttrString(m, "fstatvfs") == -1) { - return NULL; + return -1; } } #endif /* HAVE_FSTATVFS */ @@ -14930,7 +14923,7 @@ INITFUNC(void) #ifdef HAVE_STATVFS if (statvfs == NULL) { if (PyObject_DelAttrString(m, "statvfs") == -1) { - return NULL; + return -1; } } #endif /* HAVE_STATVFS */ @@ -14938,7 +14931,7 @@ INITFUNC(void) # ifdef HAVE_LCHOWN if (lchown == NULL) { if (PyObject_DelAttrString(m, "lchown") == -1) { - return NULL; + return -1; } } #endif /* HAVE_LCHOWN */ @@ -14947,15 +14940,15 @@ INITFUNC(void) #endif /* __APPLE__ */ if ((get_posix_state(m)->billion = PyLong_FromLong(1000000000)) == NULL) - return NULL; + return -1; #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) get_posix_state(m)->struct_rusage = PyUnicode_InternFromString("struct_rusage"); if (get_posix_state(m)->struct_rusage == NULL) - return NULL; + return -1; #endif get_posix_state(m)->st_mode = PyUnicode_InternFromString("st_mode"); if (get_posix_state(m)->st_mode == NULL) - return NULL; + return -1; /* suppress "function not used" warnings */ { @@ -14973,18 +14966,42 @@ INITFUNC(void) */ list = PyList_New(0); if (!list) - return NULL; + return -1; for (trace = have_functions; *trace; trace++) { PyObject *unicode = PyUnicode_DecodeASCII(*trace, strlen(*trace), NULL); if (!unicode) - return NULL; + return -1; if (PyList_Append(list, unicode)) - return NULL; + return -1; Py_DECREF(unicode); } PyModule_AddObject(m, "_have_functions", list); - return m; + return 0; +} + + +static PyModuleDef_Slot posixmodile_slots[] = { + {Py_mod_exec, 
posixmodule_exec}, + {0, NULL} +}; + +static struct PyModuleDef posixmodule = { + PyModuleDef_HEAD_INIT, + .m_name = MODNAME, + .m_doc = posix__doc__, + .m_size = sizeof(_posixstate), + .m_methods = posix_methods, + .m_slots = posixmodile_slots, + .m_traverse = _posix_traverse, + .m_clear = _posix_clear, + .m_free = _posix_free, +}; + +PyMODINIT_FUNC +INITFUNC(void) +{ + return PyModuleDef_Init(&posixmodule); } #ifdef __cplusplus From 86a93fddf72a2711aca99afa0c5374c8d6b4a321 Mon Sep 17 00:00:00 2001 From: Sergey Fedoseev Date: Sun, 10 May 2020 14:15:57 +0500 Subject: [PATCH 055/115] bpo-37986: Improve perfomance of PyLong_FromDouble() (GH-15611) * bpo-37986: Improve perfomance of PyLong_FromDouble() * Use strict bound check for safety and symmetry * Remove possibly outdated performance claims Co-authored-by: Mark Dickinson --- .../2019-11-20-09-50-58.bpo-37986.o0lmA7.rst | 2 ++ Objects/floatobject.c | 22 +------------------ Objects/longobject.c | 18 +++++++++++++-- 3 files changed, 19 insertions(+), 23 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst b/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst new file mode 100644 index 00000000000000..62446e35ae01ba --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst @@ -0,0 +1,2 @@ +Improve performance of :c:func:`PyLong_FromDouble` for values that fit into +:c:type:`long`. 
diff --git a/Objects/floatobject.c b/Objects/floatobject.c index faa02f2f05795c..9f5014092cf20a 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -862,27 +862,7 @@ static PyObject * float___trunc___impl(PyObject *self) /*[clinic end generated code: output=dd3e289dd4c6b538 input=591b9ba0d650fdff]*/ { - double x = PyFloat_AsDouble(self); - double wholepart; /* integral portion of x, rounded toward 0 */ - - (void)modf(x, &wholepart); - /* Try to get out cheap if this fits in a Python int. The attempt - * to cast to long must be protected, as C doesn't define what - * happens if the double is too big to fit in a long. Some rare - * systems raise an exception then (RISCOS was mentioned as one, - * and someone using a non-default option on Sun also bumped into - * that). Note that checking for >= and <= LONG_{MIN,MAX} would - * still be vulnerable: if a long has more bits of precision than - * a double, casting MIN/MAX to double may yield an approximation, - * and if that's rounded up, then, e.g., wholepart=LONG_MAX+1 would - * yield true from the C expression wholepart<=LONG_MAX, despite - * that wholepart is actually greater than LONG_MAX. - */ - if (LONG_MIN < wholepart && wholepart < LONG_MAX) { - const long aslong = (long)wholepart; - return PyLong_FromLong(aslong); - } - return PyLong_FromDouble(wholepart); + return PyLong_FromDouble(PyFloat_AS_DOUBLE(self)); } /*[clinic input] diff --git a/Objects/longobject.c b/Objects/longobject.c index 11fc75b918f77f..0ff0e80cd42696 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -416,6 +416,21 @@ PyLong_FromSize_t(size_t ival) PyObject * PyLong_FromDouble(double dval) { + /* Try to get out cheap if this fits in a long. When a finite value of real + * floating type is converted to an integer type, the value is truncated + * toward zero. If the value of the integral part cannot be represented by + * the integer type, the behavior is undefined. 
Thus, we must check that + * value is in range (LONG_MIN - 1, LONG_MAX + 1). If a long has more bits + * of precision than a double, casting LONG_MIN - 1 to double may yield an + * approximation, but LONG_MAX + 1 is a power of two and can be represented + * as double exactly (assuming FLT_RADIX is 2 or 16), so for simplicity + * check against [-(LONG_MAX + 1), LONG_MAX + 1). + */ + const double int_max = (unsigned long)LONG_MAX + 1; + if (-int_max < dval && dval < int_max) { + return PyLong_FromLong((long)dval); + } + PyLongObject *v; double frac; int i, ndig, expo, neg; @@ -435,8 +450,7 @@ PyLong_FromDouble(double dval) dval = -dval; } frac = frexp(dval, &expo); /* dval = frac*2**expo; 0.0 <= frac < 1.0 */ - if (expo <= 0) - return PyLong_FromLong(0L); + assert(expo > 0); ndig = (expo-1) / PyLong_SHIFT + 1; /* Number of 'digits' in result */ v = _PyLong_New(ndig); if (v == NULL) From 0122d48681b1df27015cf396559fb283ba364d6d Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sun, 10 May 2020 13:39:40 +0300 Subject: [PATCH 056/115] bpo-40397: Fix subscription of nested generic alias without parameters. (GH-20021) --- Lib/test/test_typing.py | 13 ++++++++++++- Lib/typing.py | 6 ++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 21bc7c81f2a30e..f429e883b59538 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -10,7 +10,7 @@ from typing import TypeVar, AnyStr from typing import T, KT, VT # Not in __all__. 
from typing import Union, Optional, Literal -from typing import Tuple, List, MutableMapping +from typing import Tuple, List, Dict, MutableMapping from typing import Callable from typing import Generic, ClassVar, Final, final, Protocol from typing import cast, runtime_checkable @@ -3173,6 +3173,17 @@ def test_frozenset(self): def test_dict(self): self.assertIsSubclass(dict, typing.Dict) + def test_dict_subscribe(self): + K = TypeVar('K') + V = TypeVar('V') + self.assertEqual(Dict[K, V][str, int], Dict[str, int]) + self.assertEqual(Dict[K, int][str], Dict[str, int]) + self.assertEqual(Dict[str, V][int], Dict[str, int]) + self.assertEqual(Dict[K, List[V]][str, int], Dict[str, List[int]]) + self.assertEqual(Dict[K, List[int]][str], Dict[str, List[int]]) + self.assertEqual(Dict[K, list[V]][str, int], Dict[str, list[int]]) + self.assertEqual(Dict[K, list[int]][str], Dict[str, list[int]]) + def test_no_list_instantiation(self): with self.assertRaises(TypeError): typing.List() diff --git a/Lib/typing.py b/Lib/typing.py index e31fc99e02245e..b5ba38e07c835b 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -702,8 +702,10 @@ def __getitem__(self, params): if isinstance(arg, TypeVar): arg = subst[arg] elif isinstance(arg, (_GenericAlias, GenericAlias)): - subargs = tuple(subst[x] for x in arg.__parameters__) - arg = arg[subargs] + subparams = arg.__parameters__ + if subparams: + subargs = tuple(subst[x] for x in subparams) + arg = arg[subargs] new_args.append(arg) return self.copy_with(tuple(new_args)) From 2fbc57af851814df567fb51054cb6f6a399f814a Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sun, 10 May 2020 15:14:27 +0300 Subject: [PATCH 057/115] bpo-40257: Tweak docstrings for special generic aliases. (GH-20022) * Add the terminating period. * Omit module name for builtin types. 
--- Lib/typing.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Lib/typing.py b/Lib/typing.py index b5ba38e07c835b..f94996daebd6ed 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -753,7 +753,10 @@ def __init__(self, origin, nparams, *, inst=True, name=None): name = origin.__name__ super().__init__(origin, inst=inst, name=name) self._nparams = nparams - self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}' + if origin.__module__ == 'builtins': + self.__doc__ = f'A generic version of {origin.__qualname__}.' + else: + self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.' @_tp_cache def __getitem__(self, params): From 2cc9b8486dd924214f9e5657672fdeb24449d206 Mon Sep 17 00:00:00 2001 From: Raymond Hettinger Date: Sun, 10 May 2020 14:53:29 -0700 Subject: [PATCH 058/115] Improve code clarity for the set lookup logic (GH-20028) --- Objects/setobject.c | 180 ++++++++++++++------------------------------ 1 file changed, 55 insertions(+), 125 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c index 0e4e45f60a9ccc..76b1944db45588 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -57,77 +57,43 @@ set_lookkey(PySetObject *so, PyObject *key, Py_hash_t hash) { setentry *table; setentry *entry; - size_t perturb; + size_t perturb = hash; size_t mask = so->mask; size_t i = (size_t)hash & mask; /* Unsigned for defined overflow behavior */ - size_t j; + int probes; int cmp; - entry = &so->table[i]; - if (entry->key == NULL) - return entry; - - perturb = hash; - while (1) { - if (entry->hash == hash) { - PyObject *startkey = entry->key; - /* startkey cannot be a dummy because the dummy hash field is -1 */ - assert(startkey != dummy); - if (startkey == key) - return entry; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - return entry; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, 
Py_EQ); - Py_DECREF(startkey); - if (cmp < 0) /* unlikely */ - return NULL; - if (table != so->table || entry->key != startkey) /* unlikely */ - return set_lookkey(so, key, hash); - if (cmp > 0) /* likely */ + entry = &so->table[i]; + probes = (i + LINEAR_PROBES <= mask) ? LINEAR_PROBES: 0; + do { + if (entry->hash == 0 && entry->key == NULL) return entry; - mask = so->mask; /* help avoid a register spill */ - } - - if (i + LINEAR_PROBES <= mask) { - for (j = 0 ; j < LINEAR_PROBES ; j++) { - entry++; - if (entry->hash == 0 && entry->key == NULL) + if (entry->hash == hash) { + PyObject *startkey = entry->key; + assert(startkey != dummy); + if (startkey == key) return entry; - if (entry->hash == hash) { - PyObject *startkey = entry->key; - assert(startkey != dummy); - if (startkey == key) - return entry; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - return entry; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp < 0) - return NULL; - if (table != so->table || entry->key != startkey) - return set_lookkey(so, key, hash); - if (cmp > 0) - return entry; - mask = so->mask; - } + if (PyUnicode_CheckExact(startkey) + && PyUnicode_CheckExact(key) + && _PyUnicode_EQ(startkey, key)) + return entry; + table = so->table; + Py_INCREF(startkey); + cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); + Py_DECREF(startkey); + if (cmp < 0) + return NULL; + if (table != so->table || entry->key != startkey) + return set_lookkey(so, key, hash); + if (cmp > 0) + return entry; + mask = so->mask; } - } - + entry++; + } while (probes--); perturb >>= PERTURB_SHIFT; i = (i * 5 + 1 + perturb) & mask; - - entry = &so->table[i]; - if (entry->key == NULL) - return entry; } } @@ -141,7 +107,7 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) size_t perturb; size_t mask; size_t i; /* Unsigned for defined overflow behavior */ - size_t j; + int 
probes; int cmp; /* Pre-increment is necessary to prevent arbitrary code in the rich @@ -152,75 +118,39 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) mask = so->mask; i = (size_t)hash & mask; - - entry = &so->table[i]; - if (entry->key == NULL) - goto found_unused; - perturb = hash; while (1) { - if (entry->hash == hash) { - PyObject *startkey = entry->key; - /* startkey cannot be a dummy because the dummy hash field is -1 */ - assert(startkey != dummy); - if (startkey == key) - goto found_active; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - goto found_active; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp > 0) /* likely */ - goto found_active; - if (cmp < 0) - goto comparison_error; - /* Continuing the search from the current entry only makes - sense if the table and entry are unchanged; otherwise, - we have to restart from the beginning */ - if (table != so->table || entry->key != startkey) - goto restart; - mask = so->mask; /* help avoid a register spill */ - } - - if (i + LINEAR_PROBES <= mask) { - for (j = 0 ; j < LINEAR_PROBES ; j++) { - entry++; - if (entry->hash == 0 && entry->key == NULL) - goto found_unused; - if (entry->hash == hash) { - PyObject *startkey = entry->key; - assert(startkey != dummy); - if (startkey == key) - goto found_active; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - goto found_active; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp > 0) - goto found_active; - if (cmp < 0) - goto comparison_error; - if (table != so->table || entry->key != startkey) - goto restart; - mask = so->mask; - } + entry = &so->table[i]; + probes = (i + LINEAR_PROBES <= mask) ? 
LINEAR_PROBES: 0; + do { + if (entry->hash == 0 && entry->key == NULL) + goto found_unused; + if (entry->hash == hash) { + PyObject *startkey = entry->key; + assert(startkey != dummy); + if (startkey == key) + goto found_active; + if (PyUnicode_CheckExact(startkey) + && PyUnicode_CheckExact(key) + && _PyUnicode_EQ(startkey, key)) + goto found_active; + table = so->table; + Py_INCREF(startkey); + cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); + Py_DECREF(startkey); + if (cmp > 0) + goto found_active; + if (cmp < 0) + goto comparison_error; + if (table != so->table || entry->key != startkey) + goto restart; + mask = so->mask; } - } - + entry++; + } while (probes--); perturb >>= PERTURB_SHIFT; i = (i * 5 + 1 + perturb) & mask; - - entry = &so->table[i]; - if (entry->key == NULL) - goto found_unused; } found_unused: From 5b956ca42de37c761562e9c9aeb96a0e67606e33 Mon Sep 17 00:00:00 2001 From: Pablo Galindo Date: Mon, 11 May 2020 01:41:26 +0100 Subject: [PATCH 059/115] bpo-40585: Normalize errors messages in codeop when comparing them (GH-20030) With the new parser, the error message contains always the trailing newlines, causing the comparison of the repr of the error messages in codeop to fail. This commit makes the new parser mirror the old parser's behaviour regarding trailing newlines. 
--- Lib/test/test_codeop.py | 9 +++++++++ .../2020-05-11-00-19-42.bpo-40585.yusknY.rst | 2 ++ Parser/pegen/pegen.c | 6 ++++++ 3 files changed, 17 insertions(+) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 1f27830ae50b84..0c5e362feea0ca 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -288,6 +288,15 @@ def test_invalid(self): ai("[i for i in range(10)] = (1, 2, 3)") + def test_invalid_exec(self): + ai = self.assertInvalid + ai("raise = 4", symbol="exec") + ai('def a-b', symbol='exec') + ai('await?', symbol='exec') + ai('=!=', symbol='exec') + ai('a await raise b', symbol='exec') + ai('a await raise b?+1', symbol='exec') + def test_filename(self): self.assertEqual(compile_command("a = 1\n", "abc").co_filename, compile("a = 1\n", "abc", 'single').co_filename) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst new file mode 100644 index 00000000000000..7a9258ef0a938b --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst @@ -0,0 +1,2 @@ +Fixed a bug when using :func:`codeop.compile_command` that was causing +exceptions to be swallowed with the new parser. 
Patch by Pablo Galindo diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 06af53b3597f74..c80f08668b07d6 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -310,6 +310,12 @@ get_error_line(char *buffer, int is_file) newline = strchr(buffer, '\n'); } + if (is_file) { + while (newline > buffer && newline[-1] == '\n') { + --newline; + } + } + if (newline) { return PyUnicode_DecodeUTF8(buffer, newline - buffer, "replace"); } From 6067d4bc3ce5ff4cfa5b47ceecc84a3941bc031c Mon Sep 17 00:00:00 2001 From: scoder Date: Mon, 11 May 2020 06:04:31 +0200 Subject: [PATCH 060/115] bpo-40575: Avoid unnecessary overhead in _PyDict_GetItemIdWithError() (GH-20018) Avoid unnecessary overhead in _PyDict_GetItemIdWithError() by calling _PyDict_GetItem_KnownHash() instead of the more generic PyDict_GetItemWithError(), since we already know the hash of interned strings. --- Objects/dictobject.c | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Objects/dictobject.c b/Objects/dictobject.c index fa35d16478f635..809a5ed7787370 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -1492,7 +1492,9 @@ _PyDict_GetItemIdWithError(PyObject *dp, struct _Py_Identifier *key) kv = _PyUnicode_FromId(key); /* borrowed */ if (kv == NULL) return NULL; - return PyDict_GetItemWithError(dp, kv); + Py_hash_t hash = ((PyASCIIObject *) kv)->hash; + assert (hash != -1); /* interned strings have their hash value initialised */ + return _PyDict_GetItem_KnownHash(dp, kv, hash); } PyObject * From d5d9a718662e67e2b1ac7874dda9df2d8d71d415 Mon Sep 17 00:00:00 2001 From: Inada Naoki Date: Mon, 11 May 2020 15:37:25 +0900 Subject: [PATCH 061/115] bpo-36346: array: Don't use deprecated APIs (GH-19653) * Py_UNICODE -> wchar_t * Py_UNICODE -> unicode in Argument Clinic * PyUnicode_AsUnicode -> PyUnicode_AsWideCharString * Don't use "u#" format. 
Co-authored-by: Victor Stinner --- Doc/library/array.rst | 13 +++--- Doc/whatsnew/3.9.rst | 6 +++ Modules/arraymodule.c | 82 +++++++++++++++++----------------- Modules/clinic/arraymodule.c.h | 17 ++++--- 4 files changed, 63 insertions(+), 55 deletions(-) diff --git a/Doc/library/array.rst b/Doc/library/array.rst index c9a9b1dabb2a79..78020738bf4f75 100644 --- a/Doc/library/array.rst +++ b/Doc/library/array.rst @@ -22,7 +22,7 @@ defined: +-----------+--------------------+-------------------+-----------------------+-------+ | ``'B'`` | unsigned char | int | 1 | | +-----------+--------------------+-------------------+-----------------------+-------+ -| ``'u'`` | Py_UNICODE | Unicode character | 2 | \(1) | +| ``'u'`` | wchar_t | Unicode character | 2 | \(1) | +-----------+--------------------+-------------------+-----------------------+-------+ | ``'h'`` | signed short | int | 2 | | +-----------+--------------------+-------------------+-----------------------+-------+ @@ -48,15 +48,16 @@ defined: Notes: (1) - The ``'u'`` type code corresponds to Python's obsolete unicode character - (:c:type:`Py_UNICODE` which is :c:type:`wchar_t`). Depending on the - platform, it can be 16 bits or 32 bits. + It can be 16 bits or 32 bits depending on the platform. - ``'u'`` will be removed together with the rest of the :c:type:`Py_UNICODE` - API. + .. versionchanged:: 3.9 + ``array('u')`` now uses ``wchar_t`` as C type instead of deprecated + ``Py_UNICODE``. This change doesn't affect to its behavior because + ``Py_UNICODE`` is alias of ``wchar_t`` since Python 3.3. .. deprecated-removed:: 3.3 4.0 + The actual representation of values is determined by the machine architecture (strictly speaking, by the C implementation). The actual size can be accessed through the :attr:`itemsize` attribute. 
diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 11e577baa8fb5f..c57d702dce8675 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -786,6 +786,12 @@ Changes in the Python API ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. (Contributed by Batuhan Taskaya in :issue:`39562`) +* ``array('u')`` now uses ``wchar_t`` as C type instead of ``Py_UNICODE``. + This change doesn't affect to its behavior because ``Py_UNICODE`` is alias + of ``wchar_t`` since Python 3.3. + (Contributed by Inada Naoki in :issue:`34538`.) + + CPython bytecode changes ------------------------ diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 4920ad7b82124c..732703e481adcd 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -235,24 +235,31 @@ BB_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) static PyObject * u_getitem(arrayobject *ap, Py_ssize_t i) { - return PyUnicode_FromOrdinal(((Py_UNICODE *) ap->ob_item)[i]); + return PyUnicode_FromOrdinal(((wchar_t *) ap->ob_item)[i]); } static int u_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) { - Py_UNICODE *p; - Py_ssize_t len; - - if (!PyArg_Parse(v, "u#;array item must be unicode character", &p, &len)) + PyObject *u; + if (!PyArg_Parse(v, "U;array item must be unicode character", &u)) { return -1; - if (len != 1) { + } + + Py_ssize_t len = PyUnicode_AsWideChar(u, NULL, 0); + if (len != 2) { PyErr_SetString(PyExc_TypeError, "array item must be unicode character"); return -1; } - if (i >= 0) - ((Py_UNICODE *)ap->ob_item)[i] = p[0]; + + wchar_t w; + len = PyUnicode_AsWideChar(u, &w, 1); + assert(len == 1); + + if (i >= 0) { + ((wchar_t *)ap->ob_item)[i] = w; + } return 0; } @@ -530,7 +537,7 @@ d_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) DEFINE_COMPAREITEMS(b, signed char) DEFINE_COMPAREITEMS(BB, unsigned char) -DEFINE_COMPAREITEMS(u, Py_UNICODE) +DEFINE_COMPAREITEMS(u, wchar_t) DEFINE_COMPAREITEMS(h, short) DEFINE_COMPAREITEMS(HH, unsigned short) 
DEFINE_COMPAREITEMS(i, int) @@ -548,7 +555,7 @@ DEFINE_COMPAREITEMS(QQ, unsigned long long) static const struct arraydescr descriptors[] = { {'b', 1, b_getitem, b_setitem, b_compareitems, "b", 1, 1}, {'B', 1, BB_getitem, BB_setitem, BB_compareitems, "B", 1, 0}, - {'u', sizeof(Py_UNICODE), u_getitem, u_setitem, u_compareitems, "u", 0, 0}, + {'u', sizeof(wchar_t), u_getitem, u_setitem, u_compareitems, "u", 0, 0}, {'h', sizeof(short), h_getitem, h_setitem, h_compareitems, "h", 1, 1}, {'H', sizeof(short), HH_getitem, HH_setitem, HH_compareitems, "H", 1, 0}, {'i', sizeof(int), i_getitem, i_setitem, i_compareitems, "i", 1, 1}, @@ -1660,7 +1667,7 @@ array_array_tobytes_impl(arrayobject *self) /*[clinic input] array.array.fromunicode - ustr: Py_UNICODE(zeroes=True) + ustr: unicode / Extends this array with data from the unicode string ustr. @@ -1671,25 +1678,28 @@ some other type. [clinic start generated code]*/ static PyObject * -array_array_fromunicode_impl(arrayobject *self, const Py_UNICODE *ustr, - Py_ssize_clean_t ustr_length) -/*[clinic end generated code: output=cf2f662908e2befc input=150f00566ffbca6e]*/ +array_array_fromunicode_impl(arrayobject *self, PyObject *ustr) +/*[clinic end generated code: output=24359f5e001a7f2b input=025db1fdade7a4ce]*/ { - char typecode; - - typecode = self->ob_descr->typecode; - if (typecode != 'u') { + if (self->ob_descr->typecode != 'u') { PyErr_SetString(PyExc_ValueError, "fromunicode() may only be called on " "unicode type arrays"); return NULL; } - if (ustr_length > 0) { + + Py_ssize_t ustr_length = PyUnicode_AsWideChar(ustr, NULL, 0); + assert(ustr_length > 0); + if (ustr_length > 1) { + ustr_length--; /* trim trailing NUL character */ Py_ssize_t old_size = Py_SIZE(self); - if (array_resize(self, old_size + ustr_length) == -1) + if (array_resize(self, old_size + ustr_length) == -1) { return NULL; - memcpy(self->ob_item + old_size * sizeof(Py_UNICODE), - ustr, ustr_length * sizeof(Py_UNICODE)); + } + + // must not fail + 
PyUnicode_AsWideChar( + ustr, ((wchar_t *)self->ob_item) + old_size, ustr_length); } Py_RETURN_NONE; @@ -1709,14 +1719,12 @@ static PyObject * array_array_tounicode_impl(arrayobject *self) /*[clinic end generated code: output=08e442378336e1ef input=127242eebe70b66d]*/ { - char typecode; - typecode = self->ob_descr->typecode; - if (typecode != 'u') { + if (self->ob_descr->typecode != 'u') { PyErr_SetString(PyExc_ValueError, "tounicode() may only be called on unicode type arrays"); return NULL; } - return PyUnicode_FromWideChar((Py_UNICODE *) self->ob_item, Py_SIZE(self)); + return PyUnicode_FromWideChar((wchar_t *) self->ob_item, Py_SIZE(self)); } /*[clinic input] @@ -2675,30 +2683,20 @@ array_new(PyTypeObject *type, PyObject *args, PyObject *kwds) Py_DECREF(v); } else if (initial != NULL && PyUnicode_Check(initial)) { - Py_UNICODE *ustr; Py_ssize_t n; - - ustr = PyUnicode_AsUnicode(initial); + wchar_t *ustr = PyUnicode_AsWideCharString(initial, &n); if (ustr == NULL) { - PyErr_NoMemory(); Py_DECREF(a); return NULL; } - n = PyUnicode_GET_DATA_SIZE(initial); if (n > 0) { arrayobject *self = (arrayobject *)a; - char *item = self->ob_item; - item = (char *)PyMem_Realloc(item, n); - if (item == NULL) { - PyErr_NoMemory(); - Py_DECREF(a); - return NULL; - } - self->ob_item = item; - Py_SET_SIZE(self, n / sizeof(Py_UNICODE)); - memcpy(item, ustr, n); - self->allocated = Py_SIZE(self); + // self->ob_item may be NULL but it is safe. 
+ PyMem_Free(self->ob_item); + self->ob_item = (char *)ustr; + Py_SET_SIZE(self, n); + self->allocated = n; } } else if (initial != NULL && array_Check(initial) && len > 0) { diff --git a/Modules/clinic/arraymodule.c.h b/Modules/clinic/arraymodule.c.h index e1f4b0397b9cb5..b9245ca91d5fa9 100644 --- a/Modules/clinic/arraymodule.c.h +++ b/Modules/clinic/arraymodule.c.h @@ -380,20 +380,23 @@ PyDoc_STRVAR(array_array_fromunicode__doc__, {"fromunicode", (PyCFunction)array_array_fromunicode, METH_O, array_array_fromunicode__doc__}, static PyObject * -array_array_fromunicode_impl(arrayobject *self, const Py_UNICODE *ustr, - Py_ssize_clean_t ustr_length); +array_array_fromunicode_impl(arrayobject *self, PyObject *ustr); static PyObject * array_array_fromunicode(arrayobject *self, PyObject *arg) { PyObject *return_value = NULL; - const Py_UNICODE *ustr; - Py_ssize_clean_t ustr_length; + PyObject *ustr; - if (!PyArg_Parse(arg, "u#:fromunicode", &ustr, &ustr_length)) { + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("fromunicode", "argument", "str", arg); goto exit; } - return_value = array_array_fromunicode_impl(self, ustr, ustr_length); + if (PyUnicode_READY(arg) == -1) { + goto exit; + } + ustr = arg; + return_value = array_array_fromunicode_impl(self, ustr); exit: return return_value; @@ -531,4 +534,4 @@ PyDoc_STRVAR(array_arrayiterator___setstate____doc__, #define ARRAY_ARRAYITERATOR___SETSTATE___METHODDEF \ {"__setstate__", (PyCFunction)array_arrayiterator___setstate__, METH_O, array_arrayiterator___setstate____doc__}, -/*[clinic end generated code: output=f649fc0bc9f6b13a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=9f70748dd3bc532f input=a9049054013a1b77]*/ From ef7973a981ff8f4687ef3fdb85a69fa15aa11fe5 Mon Sep 17 00:00:00 2001 From: Brad Solomon Date: Mon, 11 May 2020 14:50:11 -0400 Subject: [PATCH 062/115] bpo-40561: Add docstrings for webbrowser open functions (GH-19999) Co-authored-by: Brad Solomon Co-authored-by: Terry Jan Reedy --- 
Lib/webbrowser.py | 16 ++++++++++++++++ .../2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst | 1 + 2 files changed, 17 insertions(+) create mode 100644 Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 1ef179a91a6f19..9c73bcfb44ae81 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -69,6 +69,14 @@ def get(using=None): # instead of "from webbrowser import *". def open(url, new=0, autoraise=True): + """Display url using the default browser. + + If possible, open url in a location determined by new. + - 0: the same browser window (the default). + - 1: a new browser window. + - 2: a new browser page ("tab"). + If possible, autoraise raises the window (the default) or not. + """ if _tryorder is None: with _lock: if _tryorder is None: @@ -80,9 +88,17 @@ def open(url, new=0, autoraise=True): return False def open_new(url): + """Open url in a new window of the default browser. + + If not possible, then open url in the only browser window. + """ return open(url, 1) def open_new_tab(url): + """Open url in a new page ("tab") of the default browser. + + If not possible, then the behavior becomes equivalent to open_new(). + """ return open(url, 2) diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst new file mode 100644 index 00000000000000..bda24719b12cb3 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst @@ -0,0 +1 @@ +Provide docstrings for webbrowser open functions. 
From 86d69444e7cfe758212956df0be0ec7b8a4251a6 Mon Sep 17 00:00:00 2001 From: Hai Shi Date: Tue, 12 May 2020 05:38:55 +0800 Subject: [PATCH 063/115] bpo-40584: Update PyType_FromModuleAndSpec() to process tp_vectorcall_offset (GH-20026) --- Doc/c-api/structures.rst | 8 +++++--- Doc/c-api/type.rst | 1 + Objects/typeobject.c | 14 ++++++++++++-- 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index ea97e1e715561f..634e971952e8eb 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -424,9 +424,11 @@ Accessing attributes of extension types Heap allocated types (created using :c:func:`PyType_FromSpec` or similar), ``PyMemberDef`` may contain definitions for the special members - ``__dictoffset__`` and ``__weaklistoffset__``, corresponding to - :c:member:`~PyTypeObject.tp_dictoffset` and - :c:member:`~PyTypeObject.tp_weaklistoffset` in type objects. + ``__dictoffset__``, ``__weaklistoffset__`` and ``__vectorcalloffset__``, + corresponding to + :c:member:`~PyTypeObject.tp_dictoffset`, + :c:member:`~PyTypeObject.tp_weaklistoffset` and + :c:member:`~PyTypeObject.tp_vectorcall_offset` in type objects. 
These must be defined with ``T_PYSSIZET`` and ``READONLY``, for example:: static PyMemberDef spam_type_members[] = { diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index 7dd393f47f1b4f..f387279d143eec 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -228,6 +228,7 @@ The following functions and structs are used to create * :c:member:`~PyTypeObject.tp_dictoffset` (see :ref:`PyMemberDef `) * :c:member:`~PyTypeObject.tp_vectorcall_offset` + (see :ref:`PyMemberDef `) * :c:member:`~PyBufferProcs.bf_getbuffer` * :c:member:`~PyBufferProcs.bf_releasebuffer` diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 525f5ac5d5775a..a36b4dcc46d21b 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2954,10 +2954,10 @@ PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) PyTypeObject *type, *base; const PyType_Slot *slot; - Py_ssize_t nmembers, weaklistoffset, dictoffset; + Py_ssize_t nmembers, weaklistoffset, dictoffset, vectorcalloffset; char *res_start; - nmembers = weaklistoffset = dictoffset = 0; + nmembers = weaklistoffset = dictoffset = vectorcalloffset = 0; for (slot = spec->slots; slot->slot; slot++) { if (slot->slot == Py_tp_members) { nmembers = 0; @@ -2975,6 +2975,12 @@ PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) assert(memb->flags == READONLY); dictoffset = memb->offset; } + if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + // The PyMemberDef must be a Py_ssize_t and readonly + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + vectorcalloffset = memb->offset; + } } } } @@ -3123,6 +3129,10 @@ PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) type->tp_dealloc = subtype_dealloc; } + if (vectorcalloffset) { + type->tp_vectorcall_offset = vectorcalloffset; + } + if (PyType_Ready(type) < 0) goto fail; From 27c0d9b54abaa4112d5a317b8aa78b39ad60a808 Mon Sep 17 00:00:00 2001 From: Shantanu Date: Mon, 11 May 2020 14:53:58 
-0700 Subject: [PATCH 064/115] bpo-40334: produce specialized errors for invalid del targets (GH-19911) --- Grammar/python.gram | 13 +- Lib/test/test_grammar.py | 17 ++ Lib/test/test_syntax.py | 41 +++- Parser/pegen/parse.c | 462 +++++++++++++++++++++++++-------------- 4 files changed, 352 insertions(+), 181 deletions(-) diff --git a/Grammar/python.gram b/Grammar/python.gram index 574e1e14216449..0542107cac3e6b 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -583,15 +583,19 @@ ann_assign_subscript_attribute_target[expr_ty]: | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) } del_targets[asdl_seq*]: a=','.del_target+ [','] { a } +# The lookaheads to del_target_end ensure that we don't match expressions where a prefix of the +# expression matches our rule, thereby letting these cases fall through to invalid_del_target. del_target[expr_ty] (memo): - | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) } - | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Del, EXTRA) } + | a=t_primary '.' 
b=NAME &del_target_end { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) } + | a=t_primary '[' b=slices ']' &del_target_end { _Py_Subscript(a, b, Del, EXTRA) } | del_t_atom del_t_atom[expr_ty]: - | a=NAME { _PyPegen_set_expr_context(p, a, Del) } + | a=NAME &del_target_end { _PyPegen_set_expr_context(p, a, Del) } | '(' a=del_target ')' { _PyPegen_set_expr_context(p, a, Del) } | '(' a=[del_targets] ')' { _Py_Tuple(a, Del, EXTRA) } | '[' a=[del_targets] ']' { _Py_List(a, Del, EXTRA) } + | invalid_del_target +del_target_end: ')' | ']' | ',' | ';' | NEWLINE targets[asdl_seq*]: a=','.target+ [','] { a } target[expr_ty] (memo): @@ -649,3 +653,6 @@ invalid_lambda_star_etc: invalid_double_type_comments: | TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT { RAISE_SYNTAX_ERROR("Cannot have two type comments on def") } +invalid_del_target: + | a=star_expression &del_target_end { + RAISE_SYNTAX_ERROR("cannot delete %s", _PyPegen_get_expr_name(a)) } diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index c24d3529490be0..02ba8a8b1579a5 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -801,6 +801,23 @@ def test_del_stmt(self): del abc del x, y, (z, xyz) + x, y, z = "xyz" + del x + del y, + del (z) + del () + + a, b, c, d, e, f, g = "abcdefg" + del a, (b, c), (d, (e, f)) + + a, b, c, d, e, f, g = "abcdefg" + del a, [b, c], (d, [e, f]) + + abcd = list("abcd") + del abcd[1:2] + + compile("del a, (b[0].c, (d.e, f.g[1:2])), [h.i.j], ()", "", "exec") + def test_pass_stmt(self): # 'pass' pass diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 0c0fc48e0d3de1..06636ae8a149a4 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -63,10 +63,9 @@ Traceback (most recent call last): SyntaxError: cannot assign to function call -# Pegen does not support this yet -# >>> del f() -# Traceback (most recent call last): -# SyntaxError: cannot delete function call +>>> del f() +Traceback (most recent call last): +SyntaxError: cannot 
delete function call >>> a + 1 = 2 Traceback (most recent call last): @@ -665,7 +664,7 @@ def _check_error(self, code, errtext, self.fail("SyntaxError is not a %s" % subclass.__name__) mo = re.search(errtext, str(err)) if mo is None: - self.fail("SyntaxError did not contain '%r'" % (errtext,)) + self.fail("SyntaxError did not contain %r" % (errtext,)) self.assertEqual(err.filename, filename) if lineno is not None: self.assertEqual(err.lineno, lineno) @@ -677,10 +676,36 @@ def _check_error(self, code, errtext, def test_assign_call(self): self._check_error("f() = 1", "assign") - @support.skip_if_new_parser("Pegen does not produce a specialized error " - "message yet") def test_assign_del(self): - self._check_error("del f()", "delete") + self._check_error("del (,)", "invalid syntax") + self._check_error("del 1", "delete literal") + self._check_error("del (1, 2)", "delete literal") + self._check_error("del None", "delete None") + self._check_error("del *x", "delete starred") + self._check_error("del (*x)", "delete starred") + self._check_error("del (*x,)", "delete starred") + self._check_error("del [*x,]", "delete starred") + self._check_error("del f()", "delete function call") + self._check_error("del f(a, b)", "delete function call") + self._check_error("del o.f()", "delete function call") + self._check_error("del a[0]()", "delete function call") + self._check_error("del x, f()", "delete function call") + self._check_error("del f(), x", "delete function call") + self._check_error("del [a, b, ((c), (d,), e.f())]", "delete function call") + self._check_error("del (a if True else b)", "delete conditional") + self._check_error("del +a", "delete operator") + self._check_error("del a, +b", "delete operator") + self._check_error("del a + b", "delete operator") + self._check_error("del (a + b, c)", "delete operator") + self._check_error("del (c[0], a + b)", "delete operator") + self._check_error("del a.b.c + 2", "delete operator") + self._check_error("del a.b.c[0] + 2", 
"delete operator") + self._check_error("del (a, b, (c, d.e.f + 2))", "delete operator") + self._check_error("del [a, b, (c, d.e.f[0] + 2)]", "delete operator") + self._check_error("del (a := 5)", "delete named expression") + # We don't have a special message for this, but make sure we don't + # report "cannot delete name" + self._check_error("del a += b", "invalid syntax") def test_global_param_err_first(self): source = """if 1: diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 27feda73d99e18..a1a6f4c06bf63d 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -204,168 +204,170 @@ static KeywordToken *reserved_keywords[] = { #define del_targets_type 1133 #define del_target_type 1134 #define del_t_atom_type 1135 -#define targets_type 1136 -#define target_type 1137 -#define t_primary_type 1138 // Left-recursive -#define t_lookahead_type 1139 -#define t_atom_type 1140 -#define incorrect_arguments_type 1141 -#define invalid_kwarg_type 1142 -#define invalid_named_expression_type 1143 -#define invalid_assignment_type 1144 -#define invalid_block_type 1145 -#define invalid_comprehension_type 1146 -#define invalid_parameters_type 1147 -#define invalid_star_etc_type 1148 -#define invalid_lambda_star_etc_type 1149 -#define invalid_double_type_comments_type 1150 -#define _loop0_1_type 1151 -#define _loop0_2_type 1152 -#define _loop0_4_type 1153 -#define _gather_3_type 1154 -#define _loop0_6_type 1155 -#define _gather_5_type 1156 -#define _loop0_8_type 1157 -#define _gather_7_type 1158 -#define _loop0_10_type 1159 -#define _gather_9_type 1160 -#define _loop1_11_type 1161 -#define _loop0_13_type 1162 -#define _gather_12_type 1163 -#define _tmp_14_type 1164 -#define _tmp_15_type 1165 -#define _tmp_16_type 1166 -#define _tmp_17_type 1167 -#define _tmp_18_type 1168 -#define _tmp_19_type 1169 -#define _tmp_20_type 1170 -#define _tmp_21_type 1171 -#define _loop1_22_type 1172 -#define _tmp_23_type 1173 -#define _tmp_24_type 1174 -#define _loop0_26_type 1175 
-#define _gather_25_type 1176 -#define _loop0_28_type 1177 -#define _gather_27_type 1178 -#define _tmp_29_type 1179 -#define _loop0_30_type 1180 -#define _loop1_31_type 1181 -#define _loop0_33_type 1182 -#define _gather_32_type 1183 -#define _tmp_34_type 1184 -#define _loop0_36_type 1185 -#define _gather_35_type 1186 -#define _tmp_37_type 1187 -#define _loop0_39_type 1188 -#define _gather_38_type 1189 -#define _loop0_41_type 1190 -#define _gather_40_type 1191 -#define _loop0_43_type 1192 -#define _gather_42_type 1193 -#define _loop0_45_type 1194 -#define _gather_44_type 1195 -#define _tmp_46_type 1196 -#define _loop1_47_type 1197 -#define _tmp_48_type 1198 -#define _tmp_49_type 1199 -#define _tmp_50_type 1200 -#define _tmp_51_type 1201 -#define _tmp_52_type 1202 -#define _loop0_53_type 1203 -#define _loop0_54_type 1204 -#define _loop0_55_type 1205 -#define _loop1_56_type 1206 -#define _loop0_57_type 1207 -#define _loop1_58_type 1208 -#define _loop1_59_type 1209 -#define _loop1_60_type 1210 -#define _loop0_61_type 1211 -#define _loop1_62_type 1212 -#define _loop0_63_type 1213 -#define _loop1_64_type 1214 -#define _loop0_65_type 1215 -#define _loop1_66_type 1216 -#define _loop1_67_type 1217 -#define _tmp_68_type 1218 -#define _loop0_70_type 1219 -#define _gather_69_type 1220 -#define _loop1_71_type 1221 -#define _loop0_73_type 1222 -#define _gather_72_type 1223 -#define _loop1_74_type 1224 -#define _loop0_75_type 1225 -#define _loop0_76_type 1226 -#define _loop0_77_type 1227 -#define _loop1_78_type 1228 -#define _loop0_79_type 1229 -#define _loop1_80_type 1230 -#define _loop1_81_type 1231 -#define _loop1_82_type 1232 -#define _loop0_83_type 1233 -#define _loop1_84_type 1234 -#define _loop0_85_type 1235 -#define _loop1_86_type 1236 -#define _loop0_87_type 1237 -#define _loop1_88_type 1238 -#define _loop1_89_type 1239 -#define _loop1_90_type 1240 -#define _loop1_91_type 1241 -#define _tmp_92_type 1242 -#define _loop0_94_type 1243 -#define _gather_93_type 1244 -#define 
_tmp_95_type 1245 -#define _tmp_96_type 1246 -#define _tmp_97_type 1247 -#define _tmp_98_type 1248 -#define _loop1_99_type 1249 -#define _tmp_100_type 1250 -#define _tmp_101_type 1251 -#define _loop0_103_type 1252 -#define _gather_102_type 1253 -#define _loop1_104_type 1254 -#define _loop0_105_type 1255 -#define _loop0_106_type 1256 -#define _tmp_107_type 1257 -#define _tmp_108_type 1258 -#define _loop0_110_type 1259 -#define _gather_109_type 1260 -#define _loop0_112_type 1261 -#define _gather_111_type 1262 -#define _loop0_114_type 1263 -#define _gather_113_type 1264 -#define _loop0_116_type 1265 -#define _gather_115_type 1266 -#define _loop0_117_type 1267 -#define _loop0_119_type 1268 -#define _gather_118_type 1269 -#define _tmp_120_type 1270 -#define _loop0_122_type 1271 -#define _gather_121_type 1272 -#define _loop0_124_type 1273 -#define _gather_123_type 1274 -#define _tmp_125_type 1275 -#define _tmp_126_type 1276 -#define _tmp_127_type 1277 -#define _tmp_128_type 1278 -#define _tmp_129_type 1279 -#define _loop0_130_type 1280 -#define _tmp_131_type 1281 -#define _tmp_132_type 1282 -#define _tmp_133_type 1283 -#define _tmp_134_type 1284 -#define _tmp_135_type 1285 -#define _tmp_136_type 1286 -#define _tmp_137_type 1287 -#define _tmp_138_type 1288 -#define _tmp_139_type 1289 -#define _tmp_140_type 1290 -#define _tmp_141_type 1291 -#define _tmp_142_type 1292 -#define _tmp_143_type 1293 -#define _tmp_144_type 1294 -#define _loop1_145_type 1295 -#define _tmp_146_type 1296 -#define _tmp_147_type 1297 +#define del_target_end_type 1136 +#define targets_type 1137 +#define target_type 1138 +#define t_primary_type 1139 // Left-recursive +#define t_lookahead_type 1140 +#define t_atom_type 1141 +#define incorrect_arguments_type 1142 +#define invalid_kwarg_type 1143 +#define invalid_named_expression_type 1144 +#define invalid_assignment_type 1145 +#define invalid_block_type 1146 +#define invalid_comprehension_type 1147 +#define invalid_parameters_type 1148 +#define 
invalid_star_etc_type 1149 +#define invalid_lambda_star_etc_type 1150 +#define invalid_double_type_comments_type 1151 +#define invalid_del_target_type 1152 +#define _loop0_1_type 1153 +#define _loop0_2_type 1154 +#define _loop0_4_type 1155 +#define _gather_3_type 1156 +#define _loop0_6_type 1157 +#define _gather_5_type 1158 +#define _loop0_8_type 1159 +#define _gather_7_type 1160 +#define _loop0_10_type 1161 +#define _gather_9_type 1162 +#define _loop1_11_type 1163 +#define _loop0_13_type 1164 +#define _gather_12_type 1165 +#define _tmp_14_type 1166 +#define _tmp_15_type 1167 +#define _tmp_16_type 1168 +#define _tmp_17_type 1169 +#define _tmp_18_type 1170 +#define _tmp_19_type 1171 +#define _tmp_20_type 1172 +#define _tmp_21_type 1173 +#define _loop1_22_type 1174 +#define _tmp_23_type 1175 +#define _tmp_24_type 1176 +#define _loop0_26_type 1177 +#define _gather_25_type 1178 +#define _loop0_28_type 1179 +#define _gather_27_type 1180 +#define _tmp_29_type 1181 +#define _loop0_30_type 1182 +#define _loop1_31_type 1183 +#define _loop0_33_type 1184 +#define _gather_32_type 1185 +#define _tmp_34_type 1186 +#define _loop0_36_type 1187 +#define _gather_35_type 1188 +#define _tmp_37_type 1189 +#define _loop0_39_type 1190 +#define _gather_38_type 1191 +#define _loop0_41_type 1192 +#define _gather_40_type 1193 +#define _loop0_43_type 1194 +#define _gather_42_type 1195 +#define _loop0_45_type 1196 +#define _gather_44_type 1197 +#define _tmp_46_type 1198 +#define _loop1_47_type 1199 +#define _tmp_48_type 1200 +#define _tmp_49_type 1201 +#define _tmp_50_type 1202 +#define _tmp_51_type 1203 +#define _tmp_52_type 1204 +#define _loop0_53_type 1205 +#define _loop0_54_type 1206 +#define _loop0_55_type 1207 +#define _loop1_56_type 1208 +#define _loop0_57_type 1209 +#define _loop1_58_type 1210 +#define _loop1_59_type 1211 +#define _loop1_60_type 1212 +#define _loop0_61_type 1213 +#define _loop1_62_type 1214 +#define _loop0_63_type 1215 +#define _loop1_64_type 1216 +#define 
_loop0_65_type 1217 +#define _loop1_66_type 1218 +#define _loop1_67_type 1219 +#define _tmp_68_type 1220 +#define _loop0_70_type 1221 +#define _gather_69_type 1222 +#define _loop1_71_type 1223 +#define _loop0_73_type 1224 +#define _gather_72_type 1225 +#define _loop1_74_type 1226 +#define _loop0_75_type 1227 +#define _loop0_76_type 1228 +#define _loop0_77_type 1229 +#define _loop1_78_type 1230 +#define _loop0_79_type 1231 +#define _loop1_80_type 1232 +#define _loop1_81_type 1233 +#define _loop1_82_type 1234 +#define _loop0_83_type 1235 +#define _loop1_84_type 1236 +#define _loop0_85_type 1237 +#define _loop1_86_type 1238 +#define _loop0_87_type 1239 +#define _loop1_88_type 1240 +#define _loop1_89_type 1241 +#define _loop1_90_type 1242 +#define _loop1_91_type 1243 +#define _tmp_92_type 1244 +#define _loop0_94_type 1245 +#define _gather_93_type 1246 +#define _tmp_95_type 1247 +#define _tmp_96_type 1248 +#define _tmp_97_type 1249 +#define _tmp_98_type 1250 +#define _loop1_99_type 1251 +#define _tmp_100_type 1252 +#define _tmp_101_type 1253 +#define _loop0_103_type 1254 +#define _gather_102_type 1255 +#define _loop1_104_type 1256 +#define _loop0_105_type 1257 +#define _loop0_106_type 1258 +#define _tmp_107_type 1259 +#define _tmp_108_type 1260 +#define _loop0_110_type 1261 +#define _gather_109_type 1262 +#define _loop0_112_type 1263 +#define _gather_111_type 1264 +#define _loop0_114_type 1265 +#define _gather_113_type 1266 +#define _loop0_116_type 1267 +#define _gather_115_type 1268 +#define _loop0_117_type 1269 +#define _loop0_119_type 1270 +#define _gather_118_type 1271 +#define _tmp_120_type 1272 +#define _loop0_122_type 1273 +#define _gather_121_type 1274 +#define _loop0_124_type 1275 +#define _gather_123_type 1276 +#define _tmp_125_type 1277 +#define _tmp_126_type 1278 +#define _tmp_127_type 1279 +#define _tmp_128_type 1280 +#define _tmp_129_type 1281 +#define _loop0_130_type 1282 +#define _tmp_131_type 1283 +#define _tmp_132_type 1284 +#define _tmp_133_type 1285 
+#define _tmp_134_type 1286 +#define _tmp_135_type 1287 +#define _tmp_136_type 1288 +#define _tmp_137_type 1289 +#define _tmp_138_type 1290 +#define _tmp_139_type 1291 +#define _tmp_140_type 1292 +#define _tmp_141_type 1293 +#define _tmp_142_type 1294 +#define _tmp_143_type 1295 +#define _tmp_144_type 1296 +#define _loop1_145_type 1297 +#define _tmp_146_type 1298 +#define _tmp_147_type 1299 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -503,6 +505,7 @@ static expr_ty ann_assign_subscript_attribute_target_rule(Parser *p); static asdl_seq* del_targets_rule(Parser *p); static expr_ty del_target_rule(Parser *p); static expr_ty del_t_atom_rule(Parser *p); +static void *del_target_end_rule(Parser *p); static asdl_seq* targets_rule(Parser *p); static expr_ty target_rule(Parser *p); static expr_ty t_primary_rule(Parser *p); @@ -518,6 +521,7 @@ static void *invalid_parameters_rule(Parser *p); static void *invalid_star_etc_rule(Parser *p); static void *invalid_lambda_star_etc_rule(Parser *p); static void *invalid_double_type_comments_rule(Parser *p); +static void *invalid_del_target_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); static asdl_seq *_loop0_4_rule(Parser *p); @@ -9786,8 +9790,8 @@ del_targets_rule(Parser *p) } // del_target: -// | t_primary '.' NAME !t_lookahead -// | t_primary '[' slices ']' !t_lookahead +// | t_primary '.' NAME &del_target_end +// | t_primary '[' slices ']' &del_target_end // | del_t_atom static expr_ty del_target_rule(Parser *p) @@ -9807,7 +9811,7 @@ del_target_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // t_primary '.' NAME !t_lookahead + { // t_primary '.' 
NAME &del_target_end Token * _literal; expr_ty a; expr_ty b; @@ -9818,7 +9822,7 @@ del_target_rule(Parser *p) && (b = _PyPegen_name_token(p)) // NAME && - _PyPegen_lookahead(0, t_lookahead_rule, p) + _PyPegen_lookahead(1, del_target_end_rule, p) ) { Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9838,7 +9842,7 @@ del_target_rule(Parser *p) } p->mark = _mark; } - { // t_primary '[' slices ']' !t_lookahead + { // t_primary '[' slices ']' &del_target_end Token * _literal; Token * _literal_1; expr_ty a; @@ -9852,7 +9856,7 @@ del_target_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && - _PyPegen_lookahead(0, t_lookahead_rule, p) + _PyPegen_lookahead(1, del_target_end_rule, p) ) { Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); @@ -9889,7 +9893,12 @@ del_target_rule(Parser *p) return _res; } -// del_t_atom: NAME | '(' del_target ')' | '(' del_targets? ')' | '[' del_targets? ']' +// del_t_atom: +// | NAME &del_target_end +// | '(' del_target ')' +// | '(' del_targets? ')' +// | '[' del_targets? 
']' +// | invalid_del_target static expr_ty del_t_atom_rule(Parser *p) { @@ -9906,10 +9915,12 @@ del_t_atom_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME + { // NAME &del_target_end expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME + && + _PyPegen_lookahead(1, del_target_end_rule, p) ) { _res = _PyPegen_set_expr_context ( p , a , Del ); @@ -10000,6 +10011,86 @@ del_t_atom_rule(Parser *p) } p->mark = _mark; } + { // invalid_del_target + void *invalid_del_target_var; + if ( + (invalid_del_target_var = invalid_del_target_rule(p)) // invalid_del_target + ) + { + _res = invalid_del_target_var; + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; +} + +// del_target_end: ')' | ']' | ',' | ';' | NEWLINE +static void * +del_target_end_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ')' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // ']' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // ',' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // ';' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 13)) // token=';' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // NEWLINE + Token * newline_var; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = newline_var; + goto done; + } + p->mark = _mark; + } _res = NULL; done: return _res; @@ -10962,6 +11053,37 @@ invalid_double_type_comments_rule(Parser *p) return _res; } +// invalid_del_target: 
star_expression &del_target_end +static void * +invalid_del_target_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_expression &del_target_end + expr_ty a; + if ( + (a = star_expression_rule(p)) // star_expression + && + _PyPegen_lookahead(1, del_target_end_rule, p) + ) + { + _res = RAISE_SYNTAX_ERROR ( "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; +} + // _loop0_1: NEWLINE static asdl_seq * _loop0_1_rule(Parser *p) From 4804b5b3df82e7892ca0550b02f902bcfc16bb48 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 12 May 2020 01:43:38 +0200 Subject: [PATCH 065/115] bpo-39465: Don't access directly _Py_Identifier members (GH-20043) * Replace id->object with _PyUnicode_FromId(&id) * Use _Py_static_string_init(str) macro to initialize statically name_op in typeobject.c. --- Modules/_cursesmodule.c | 4 ++-- Objects/abstract.c | 2 +- Objects/typeobject.c | 14 +++++++------- Python/ceval.c | 2 +- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c index 08991fd54808fb..c70b0e2a19fadc 100644 --- a/Modules/_cursesmodule.c +++ b/Modules/_cursesmodule.c @@ -3814,7 +3814,7 @@ update_lines_cols(void) return 0; } /* PyId_LINES.object will be initialized here. 
*/ - if (PyDict_SetItem(ModDict, PyId_LINES.object, o)) { + if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_LINES), o)) { Py_DECREF(m); Py_DECREF(o); return 0; @@ -3830,7 +3830,7 @@ update_lines_cols(void) Py_DECREF(o); return 0; } - if (PyDict_SetItem(ModDict, PyId_COLS.object, o)) { + if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_COLS), o)) { Py_DECREF(m); Py_DECREF(o); return 0; diff --git a/Objects/abstract.c b/Objects/abstract.c index 6e390dd92c3aef..b014f79e8d0fba 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -2287,7 +2287,7 @@ method_output_as_list(PyObject *o, _Py_Identifier *meth_id) PyErr_Format(PyExc_TypeError, "%.200s.%U() returned a non-iterable (type %.200s)", Py_TYPE(o)->tp_name, - meth_id->object, + _PyUnicode_FromId(meth_id), Py_TYPE(meth_output)->tp_name); } Py_DECREF(meth_output); diff --git a/Objects/typeobject.c b/Objects/typeobject.c index a36b4dcc46d21b..243f8811b62571 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -1519,7 +1519,7 @@ lookup_method(PyObject *self, _Py_Identifier *attrid, int *unbound) { PyObject *res = lookup_maybe_method(self, attrid, unbound); if (res == NULL && !PyErr_Occurred()) { - PyErr_SetObject(PyExc_AttributeError, attrid->object); + PyErr_SetObject(PyExc_AttributeError, _PyUnicode_FromId(attrid)); } return res; } @@ -6864,12 +6864,12 @@ slot_tp_setattro(PyObject *self, PyObject *name, PyObject *value) } static _Py_Identifier name_op[] = { - {0, "__lt__", 0}, - {0, "__le__", 0}, - {0, "__eq__", 0}, - {0, "__ne__", 0}, - {0, "__gt__", 0}, - {0, "__ge__", 0} + _Py_static_string_init("__lt__"), + _Py_static_string_init("__le__"), + _Py_static_string_init("__eq__"), + _Py_static_string_init("__ne__"), + _Py_static_string_init("__gt__"), + _Py_static_string_init("__ge__"), }; static PyObject * diff --git a/Python/ceval.c b/Python/ceval.c index 6435bd05446aa2..e54e344a5fd514 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -4414,7 +4414,7 @@ special_lookup(PyThreadState *tstate, 
PyObject *o, _Py_Identifier *id) PyObject *res; res = _PyObject_LookupSpecial(o, id); if (res == NULL && !_PyErr_Occurred(tstate)) { - _PyErr_SetObject(tstate, PyExc_AttributeError, id->object); + _PyErr_SetObject(tstate, PyExc_AttributeError, _PyUnicode_FromId(id)); return NULL; } return res; From 21cdb711e3b1975398c54141e519ead02670610e Mon Sep 17 00:00:00 2001 From: Raymond Hettinger Date: Mon, 11 May 2020 17:00:53 -0700 Subject: [PATCH 066/115] bpo-40571: Make lru_cache(maxsize=None) more discoverable (GH-20019) --- Doc/library/functools.rst | 26 +++++++++++++++++++ Lib/functools.py | 11 +++++++- Lib/test/test_functools.py | 19 ++++++++++++++ .../2020-05-09-15-38-25.bpo-40571.kOXZGC.rst | 2 ++ 4 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 856c1c790ae361..204e66ae5ac407 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -26,6 +26,32 @@ function for the purposes of this module. The :mod:`functools` module defines the following functions: +.. decorator:: cache(user_function) + + Simple lightweight unbounded function cache. Sometimes called + `"memoize" `_. + + Returns the same as ``lru_cache(maxsize=None)``, creating a thin + wrapper around a dictionary lookup for the function arguments. Because it + never needs to evict old values, this is smaller and faster than + :func:`lru_cache()` with a size limit. + + For example:: + + @cache + def factorial(n): + return n * factorial(n-1) if n else 1 + + >>> factorial(10) # no previously cached result, makes 11 recursive calls + 3628800 + >>> factorial(5) # just looks up cached value result + 120 + >>> factorial(12) # makes two new recursive calls, the other 10 are cached + 479001600 + + .. versionadded:: 3.9 + + .. 
decorator:: cached_property(func) Transform a method of a class into a property whose value is computed once diff --git a/Lib/functools.py b/Lib/functools.py index f05b106b62c007..87c7d87438998b 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -10,7 +10,7 @@ # See C source code for _functools credits/copyright __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', - 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', + 'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce', 'TopologicalSorter', 'CycleError', 'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod', 'cached_property'] @@ -888,6 +888,15 @@ def cache_clear(): pass +################################################################################ +### cache -- simplified access to the infinity cache +################################################################################ + +def cache(user_function, /): + 'Simple lightweight unbounded cache. Sometimes called "memoize".' + return lru_cache(maxsize=None)(user_function) + + ################################################################################ ### singledispatch() - single-dispatch generic function decorator ################################################################################ diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index b3893a15566fa6..e122fe0b333402 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -1432,6 +1432,25 @@ def check_order_with_hash_seed(seed): self.assertEqual(run1, run2) +class TestCache: + # This tests that the pass-through is working as designed. + # The underlying functionality is tested in TestLRU. 
+ + def test_cache(self): + @self.module.cache + def fib(n): + if n < 2: + return n + return fib(n-1) + fib(n-2) + self.assertEqual([fib(n) for n in range(16)], + [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]) + self.assertEqual(fib.cache_info(), + self.module._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16)) + fib.cache_clear() + self.assertEqual(fib.cache_info(), + self.module._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0)) + + class TestLRU: def test_lru(self): diff --git a/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst b/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst new file mode 100644 index 00000000000000..476770f6974d2f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst @@ -0,0 +1,2 @@ +Added functools.cache() as a simpler, more discoverable way to access the +unbounded cache variant of lru_cache(maxsize=None). From b617993b7c0b0f6f679ef7003a62d0318b6d6af9 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 12 May 2020 02:42:19 +0200 Subject: [PATCH 067/115] bpo-40602: Rename hashtable.h to pycore_hashtable.h (GH-20044) * Move Modules/hashtable.h to Include/internal/pycore_hashtable.h * Move Modules/hashtable.c to Python/hashtable.c * Python is now linked to hashtable.c. _tracemalloc is no longer linked to hashtable.c. Previously, marshal.c got hashtable.c via _tracemalloc.c which is built as a builtin module. 
--- .../internal/pycore_hashtable.h | 17 ++++++++++++----- Makefile.pre.in | 2 ++ Modules/Setup | 2 +- Modules/_tracemalloc.c | 2 +- PCbuild/pythoncore.vcxproj | 3 ++- PCbuild/pythoncore.vcxproj.filters | 9 ++++++--- {Modules => Python}/hashtable.c | 2 +- Python/marshal.c | 2 +- 8 files changed, 26 insertions(+), 13 deletions(-) rename Modules/hashtable.h => Include/internal/pycore_hashtable.h (96%) rename {Modules => Python}/hashtable.c (99%) diff --git a/Modules/hashtable.h b/Include/internal/pycore_hashtable.h similarity index 96% rename from Modules/hashtable.h rename to Include/internal/pycore_hashtable.h index dbec23d2851872..585f76b51d7112 100644 --- a/Modules/hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -1,7 +1,12 @@ -#ifndef Py_HASHTABLE_H -#define Py_HASHTABLE_H -/* The whole API is private */ -#ifndef Py_LIMITED_API +#ifndef Py_INTERNAL_HASHTABLE_H +#define Py_INTERNAL_HASHTABLE_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif /* Single linked list */ @@ -207,5 +212,7 @@ PyAPI_FUNC(int) _Py_hashtable_pop( _Py_hashtable_pop(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) -#endif /* Py_LIMITED_API */ +#ifdef __cplusplus +} #endif +#endif /* !Py_INTERNAL_HASHTABLE_H */ diff --git a/Makefile.pre.in b/Makefile.pre.in index 0d616d304484ce..d545a9efb3cd99 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -351,6 +351,7 @@ PYTHON_OBJS= \ Python/getversion.o \ Python/graminit.o \ Python/hamt.o \ + Python/hashtable.o \ Python/import.o \ Python/importdl.o \ Python/initconfig.o \ @@ -1131,6 +1132,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_getopt.h \ $(srcdir)/Include/internal/pycore_gil.h \ $(srcdir)/Include/internal/pycore_hamt.h \ + $(srcdir)/Include/internal/pycore_hashtable.h \ $(srcdir)/Include/internal/pycore_import.h \ $(srcdir)/Include/internal/pycore_initconfig.h \ $(srcdir)/Include/internal/pycore_interp.h \ diff --git a/Modules/Setup 
b/Modules/Setup index 6bf142419de3d9..87e73bac78faec 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -132,7 +132,7 @@ faulthandler faulthandler.c # # bpo-35053: The module must be builtin since _Py_NewReference() # can call _PyTraceMalloc_NewReference(). -_tracemalloc _tracemalloc.c hashtable.c +_tracemalloc _tracemalloc.c # PEG-based parser module -- slated to be *the* parser _peg_parser _peg_parser.c diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index ea7e0127366ab0..f22338166d0dc1 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -2,7 +2,7 @@ #include "pycore_gc.h" // PyGC_Head #include "pycore_pymem.h" // _Py_tracemalloc_config #include "pycore_traceback.h" -#include "hashtable.h" +#include "pycore_hashtable.h" #include "frameobject.h" // PyFrame_GetBack() #include "clinic/_tracemalloc.c.h" diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 73274ac9acf557..b6b0cf3e991ba7 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -181,6 +181,7 @@ + @@ -335,7 +336,6 @@ - @@ -462,6 +462,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 254c8fbbea5fb8..10dfffba6113e5 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -237,6 +237,9 @@ Include + + Include + Include @@ -1034,6 +1037,9 @@ Python + + Modules + Python @@ -1142,9 +1148,6 @@ Modules - - Modules - PC diff --git a/Modules/hashtable.c b/Python/hashtable.c similarity index 99% rename from Modules/hashtable.c rename to Python/hashtable.c index 4a36a1e71cdd05..22b84590105f9e 100644 --- a/Modules/hashtable.c +++ b/Python/hashtable.c @@ -45,7 +45,7 @@ */ #include "Python.h" -#include "hashtable.h" +#include "pycore_hashtable.h" #define HASHTABLE_MIN_SIZE 16 #define HASHTABLE_HIGH 0.50 diff --git a/Python/marshal.c b/Python/marshal.c index b4429aea502d3f..d2bff524f30dde 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -12,7 +12,7 @@ #include 
"longintrepr.h" #include "code.h" #include "marshal.h" -#include "../Modules/hashtable.h" +#include "pycore_hashtable.h" /*[clinic input] module marshal From d0919f0d6bb757b6bcfd7b2e15656d318c9d5cd9 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 12 May 2020 03:07:40 +0200 Subject: [PATCH 068/115] bpo-40602: _Py_hashtable_new() uses PyMem_Malloc() (GH-20046) _Py_hashtable_new() now uses PyMem_Malloc/PyMem_Free allocator by default, rather than PyMem_RawMalloc/PyMem_RawFree. PyMem_Malloc is faster than PyMem_RawMalloc for memory blocks smaller than or equal to 512 bytes. --- Python/hashtable.c | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Python/hashtable.c b/Python/hashtable.c index 22b84590105f9e..e9f02d8650e4f8 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -149,11 +149,12 @@ _Py_hashtable_new_full(size_t key_size, size_t data_size, _Py_hashtable_allocator_t alloc; if (allocator == NULL) { - alloc.malloc = PyMem_RawMalloc; - alloc.free = PyMem_RawFree; + alloc.malloc = PyMem_Malloc; + alloc.free = PyMem_Free; } - else + else { alloc = *allocator; + } ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); if (ht == NULL) From b1b4c790e7d3b5f4244450aefe3d8f01710c13f7 Mon Sep 17 00:00:00 2001 From: Tim Peters Date: Mon, 11 May 2020 21:19:20 -0500 Subject: [PATCH 069/115] bpo-40480: restore ability to join fnmatch.translate() results (GH-20049) In translate(), generate unique group names across calls. The restores the undocumented ability to get a valid regexp by joining multiple translate() results via `|`. 
--- Lib/fnmatch.py | 17 +++++++++++++---- Lib/test/test_fnmatch.py | 24 +++++++++++++++++++++--- 2 files changed, 34 insertions(+), 7 deletions(-) diff --git a/Lib/fnmatch.py b/Lib/fnmatch.py index d7d915d51314da..0eb1802bdb53c5 100644 --- a/Lib/fnmatch.py +++ b/Lib/fnmatch.py @@ -16,6 +16,12 @@ __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] +# Build a thread-safe incrementing counter to help create unique regexp group +# names across calls. +from itertools import count +_nextgroupnum = count().__next__ +del count + def fnmatch(name, pat): """Test whether FILENAME matches PATTERN. @@ -148,9 +154,12 @@ def translate(pat): # in a lookahead assertion, save the matched part in a group, then # consume that group via a backreference. If the overall match fails, # the lookahead assertion won't try alternatives. So the translation is: - # (?=(P.*?fixed))(?P=name) - # Group names are created as needed: g1, g2, g3, ... - groupnum = 0 + # (?=(?P.*?fixed))(?P=name) + # Group names are created as needed: g0, g1, g2, ... + # The numbers are obtained from _nextgroupnum() to ensure they're unique + # across calls and across threads. This is because people rely on the + # undocumented ability to join multiple translate() results together via + # "|" to build large regexps matching "one of many" shell patterns. 
while i < n: assert inp[i] is STAR i += 1 @@ -167,7 +176,7 @@ def translate(pat): add(".*") add(fixed) else: - groupnum += 1 + groupnum = _nextgroupnum() add(f"(?=(?P.*?{fixed}))(?P=g{groupnum})") assert i == n res = "".join(res) diff --git a/Lib/test/test_fnmatch.py b/Lib/test/test_fnmatch.py index 4c173069503cc6..10668e4f6103aa 100644 --- a/Lib/test/test_fnmatch.py +++ b/Lib/test/test_fnmatch.py @@ -106,6 +106,7 @@ def test_warnings(self): class TranslateTestCase(unittest.TestCase): def test_translate(self): + import re self.assertEqual(translate('*'), r'(?s:.*)\Z') self.assertEqual(translate('?'), r'(?s:.)\Z') self.assertEqual(translate('a?b*'), r'(?s:a.b.*)\Z') @@ -122,9 +123,26 @@ def test_translate(self): self.assertEqual(translate('*********A'), r'(?s:.*A)\Z') self.assertEqual(translate('A*********?[?]?'), r'(?s:A.*.[?].)\Z') # fancy translation to prevent exponential-time match failure - self.assertEqual(translate('**a*a****a'), - r'(?s:(?=(?P.*?a))(?P=g1)(?=(?P.*?a))(?P=g2).*a)\Z') - + t = translate('**a*a****a') + digits = re.findall(r'\d+', t) + self.assertEqual(len(digits), 4) + self.assertEqual(digits[0], digits[1]) + self.assertEqual(digits[2], digits[3]) + g1 = f"g{digits[0]}" # e.g., group name "g4" + g2 = f"g{digits[2]}" # e.g., group name "g5" + self.assertEqual(t, + fr'(?s:(?=(?P<{g1}>.*?a))(?P={g1})(?=(?P<{g2}>.*?a))(?P={g2}).*a)\Z') + # and try pasting multiple translate results - it's an undocumented + # feature that this works; all the pain of generating unique group + # names across calls exists to support this + r1 = translate('**a**a**a*') + r2 = translate('**b**b**b*') + r3 = translate('*c*c*c*') + fatre = "|".join([r1, r2, r3]) + self.assertTrue(re.match(fatre, 'abaccad')) + self.assertTrue(re.match(fatre, 'abxbcab')) + self.assertTrue(re.match(fatre, 'cbabcaxc')) + self.assertFalse(re.match(fatre, 'dabccbad')) class FilterTestCase(unittest.TestCase): From f3a5b7ada0c951f317dbd307de4b410e58d3e1b3 Mon Sep 17 00:00:00 2001 From: Batuhan 
Taskaya Date: Tue, 12 May 2020 05:32:40 +0300 Subject: [PATCH 070/115] bpo-39481: remove generic classes from ipaddress/mmap (GH-20045) These were added by mistake (see https://bugs.python.org/issue39481#msg366288). --- Lib/ipaddress.py | 7 ------- Lib/test/test_genericalias.py | 4 ---- Modules/mmapmodule.c | 2 -- 3 files changed, 13 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 439f2418174686..6e5a754c2acf1e 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -12,7 +12,6 @@ import functools -import types IPV4LENGTH = 32 IPV6LENGTH = 128 @@ -1125,8 +1124,6 @@ def is_loopback(self): return (self.network_address.is_loopback and self.broadcast_address.is_loopback) - __class_getitem__ = classmethod(types.GenericAlias) - class _BaseV4: """Base IPv4 object. @@ -1446,8 +1443,6 @@ def with_hostmask(self): return '%s/%s' % (self._string_from_ip_int(self._ip), self.hostmask) - __class_getitem__ = classmethod(types.GenericAlias) - class IPv4Network(_BaseV4, _BaseNetwork): @@ -2156,8 +2151,6 @@ def is_unspecified(self): def is_loopback(self): return self._ip == 1 and self.network.is_loopback - __class_getitem__ = classmethod(types.GenericAlias) - class IPv6Network(_BaseV6, _BaseNetwork): diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py index 024b2f6ed6636b..4f3798e8f87d8f 100644 --- a/Lib/test/test_genericalias.py +++ b/Lib/test/test_genericalias.py @@ -17,8 +17,6 @@ from difflib import SequenceMatcher from filecmp import dircmp from fileinput import FileInput -from mmap import mmap -from ipaddress import IPv4Network, IPv4Interface, IPv6Network, IPv6Interface from itertools import chain from http.cookies import Morsel from multiprocessing.managers import ValueProxy @@ -49,7 +47,6 @@ class BaseTest(unittest.TestCase): def test_subscriptable(self): for t in (type, tuple, list, dict, set, frozenset, enumerate, - mmap, defaultdict, deque, SequenceMatcher, dircmp, @@ -74,7 +71,6 @@ def test_subscriptable(self): Sequence, 
MutableSequence, MappingProxyType, AsyncGeneratorType, DirEntry, - IPv4Network, IPv4Interface, IPv6Network, IPv6Interface, chain, TemporaryDirectory, SpooledTemporaryFile, Queue, SimpleQueue, diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index 6c503b3429b23a..a3e22d0a5110da 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -816,8 +816,6 @@ static struct PyMethodDef mmap_object_methods[] = { #ifdef MS_WINDOWS {"__sizeof__", (PyCFunction) mmap__sizeof__method, METH_NOARGS}, #endif - {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, - PyDoc_STR("See PEP 585")}, {NULL, NULL} /* sentinel */ }; From 74ea6b5a7501fb393cd567fb21998d0bfeeb267c Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Tue, 12 May 2020 12:42:04 +0300 Subject: [PATCH 071/115] bpo-40593: Improve syntax errors for invalid characters in source code. (GH-20033) --- Include/cpython/unicodeobject.h | 2 + Include/errcode.h | 1 - Lib/test/test_fstring.py | 2 +- Lib/test/test_source_encoding.py | 3 + Lib/test/test_unicode_identifiers.py | 8 ++- .../2020-05-11-13-50-52.bpo-40593.yuOXj3.rst | 1 + Objects/unicodeobject.c | 64 ++++++++++++------- Parser/pegen/pegen.c | 3 - Parser/tokenizer.c | 46 ++++++++++--- Python/pythonrun.c | 3 - 10 files changed, 90 insertions(+), 43 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 81a35cdc801d09..94326876292b63 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -1222,6 +1222,8 @@ PyAPI_FUNC(void) _PyUnicode_ClearStaticStrings(void); and where the hash values are equal (i.e. 
a very probable match) */ PyAPI_FUNC(int) _PyUnicode_EQ(PyObject *, PyObject *); +PyAPI_FUNC(Py_ssize_t) _PyUnicode_ScanIdentifier(PyObject *); + #ifdef __cplusplus } #endif diff --git a/Include/errcode.h b/Include/errcode.h index b37cd261d5ec4d..790518b8b7730e 100644 --- a/Include/errcode.h +++ b/Include/errcode.h @@ -29,7 +29,6 @@ extern "C" { #define E_EOFS 23 /* EOF in triple-quoted string */ #define E_EOLS 24 /* EOL in single-quoted string */ #define E_LINECONT 25 /* Unexpected characters after a line continuation */ -#define E_IDENTIFIER 26 /* Invalid characters in identifier */ #define E_BADSINGLE 27 /* Ill-formed single statement input */ #ifdef __cplusplus diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index ac5aa9a76efe7c..e0bb5b56b2614f 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -583,7 +583,7 @@ def test_missing_expression(self): ]) # Different error message is raised for other whitespace characters. - self.assertAllRaise(SyntaxError, 'invalid character in identifier', + self.assertAllRaise(SyntaxError, r"invalid non-printable character U\+00A0", ["f'''{\xa0}'''", "\xa0", ]) diff --git a/Lib/test/test_source_encoding.py b/Lib/test/test_source_encoding.py index a0bd741c36ac29..5ca43461d9940d 100644 --- a/Lib/test/test_source_encoding.py +++ b/Lib/test/test_source_encoding.py @@ -57,6 +57,9 @@ def test_issue7820(self): # one byte in common with the UTF-16-LE BOM self.assertRaises(SyntaxError, eval, b'\xff\x20') + # one byte in common with the UTF-8 BOM + self.assertRaises(SyntaxError, eval, b'\xef\x20') + # two bytes in common with the UTF-8 BOM self.assertRaises(SyntaxError, eval, b'\xef\xbb\x20') diff --git a/Lib/test/test_unicode_identifiers.py b/Lib/test/test_unicode_identifiers.py index 07332c4631903e..5b9ced5d1cb837 100644 --- a/Lib/test/test_unicode_identifiers.py +++ b/Lib/test/test_unicode_identifiers.py @@ -20,9 +20,11 @@ def test_non_bmp_normalized(self): def test_invalid(self): try: from test 
import badsyntax_3131 - except SyntaxError as s: - self.assertEqual(str(s), - "invalid character in identifier (badsyntax_3131.py, line 2)") + except SyntaxError as err: + self.assertEqual(str(err), + "invalid character '€' (U+20AC) (badsyntax_3131.py, line 2)") + self.assertEqual(err.lineno, 2) + self.assertEqual(err.offset, 1) else: self.fail("expected exception didn't occur") diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst new file mode 100644 index 00000000000000..5587d4f49ccf97 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst @@ -0,0 +1 @@ +Improved syntax errors for invalid characters in source code. diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 18b9458721de18..276547ca48a5b2 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -12309,31 +12309,22 @@ unicode_isnumeric_impl(PyObject *self) Py_RETURN_TRUE; } -int -PyUnicode_IsIdentifier(PyObject *self) +Py_ssize_t +_PyUnicode_ScanIdentifier(PyObject *self) { Py_ssize_t i; - int ready = PyUnicode_IS_READY(self); + if (PyUnicode_READY(self) == -1) + return -1; - Py_ssize_t len = ready ? 
PyUnicode_GET_LENGTH(self) : PyUnicode_GET_SIZE(self); + Py_ssize_t len = PyUnicode_GET_LENGTH(self); if (len == 0) { /* an empty string is not a valid identifier */ return 0; } - int kind = 0; - const void *data = NULL; - const wchar_t *wstr = NULL; - Py_UCS4 ch; - if (ready) { - kind = PyUnicode_KIND(self); - data = PyUnicode_DATA(self); - ch = PyUnicode_READ(kind, data, 0); - } - else { - wstr = _PyUnicode_WSTR(self); - ch = wstr[0]; - } + int kind = PyUnicode_KIND(self); + const void *data = PyUnicode_DATA(self); + Py_UCS4 ch = PyUnicode_READ(kind, data, 0); /* PEP 3131 says that the first character must be in XID_Start and subsequent characters in XID_Continue, and for the ASCII range, the 2.x rules apply (i.e @@ -12347,17 +12338,44 @@ PyUnicode_IsIdentifier(PyObject *self) } for (i = 1; i < len; i++) { - if (ready) { - ch = PyUnicode_READ(kind, data, i); + ch = PyUnicode_READ(kind, data, i); + if (!_PyUnicode_IsXidContinue(ch)) { + return i; } - else { - ch = wstr[i]; + } + return i; +} + +int +PyUnicode_IsIdentifier(PyObject *self) +{ + if (PyUnicode_IS_READY(self)) { + Py_ssize_t i = _PyUnicode_ScanIdentifier(self); + Py_ssize_t len = PyUnicode_GET_LENGTH(self); + /* an empty string is not a valid identifier */ + return len && i == len; + } + else { + Py_ssize_t i, len = PyUnicode_GET_SIZE(self); + if (len == 0) { + /* an empty string is not a valid identifier */ + return 0; } - if (!_PyUnicode_IsXidContinue(ch)) { + + const wchar_t *wstr = _PyUnicode_WSTR(self); + Py_UCS4 ch = wstr[0]; + if (!_PyUnicode_IsXidStart(ch) && ch != 0x5F /* LOW LINE */) { return 0; } + + for (i = 1; i < len; i++) { + ch = wstr[i]; + if (!_PyUnicode_IsXidContinue(ch)) { + return 0; + } + } + return 1; } - return 1; } /*[clinic input] diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index c80f08668b07d6..5f8c862c1f88be 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -337,9 +337,6 @@ tokenizer_error(Parser *p) case E_TOKEN: msg = "invalid token"; break; - 
case E_IDENTIFIER: - msg = "invalid character in identifier"; - break; case E_EOFS: RAISE_SYNTAX_ERROR("EOF while scanning triple-quoted string literal"); return -1; diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 0f2b6af5e50adf..b81fa118f216eb 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -1101,25 +1101,53 @@ static int verify_identifier(struct tok_state *tok) { PyObject *s; - int result; if (tok->decoding_erred) return 0; s = PyUnicode_DecodeUTF8(tok->start, tok->cur - tok->start, NULL); if (s == NULL) { if (PyErr_ExceptionMatches(PyExc_UnicodeDecodeError)) { - PyErr_Clear(); - tok->done = E_IDENTIFIER; - } else { + tok->done = E_DECODE; + } + else { tok->done = E_ERROR; } return 0; } - result = PyUnicode_IsIdentifier(s); - Py_DECREF(s); - if (result == 0) { - tok->done = E_IDENTIFIER; + Py_ssize_t invalid = _PyUnicode_ScanIdentifier(s); + if (invalid < 0) { + Py_DECREF(s); + tok->done = E_ERROR; + return 0; } - return result; + assert(PyUnicode_GET_LENGTH(s) > 0); + if (invalid < PyUnicode_GET_LENGTH(s)) { + Py_UCS4 ch = PyUnicode_READ_CHAR(s, invalid); + if (invalid + 1 < PyUnicode_GET_LENGTH(s)) { + /* Determine the offset in UTF-8 encoded input */ + Py_SETREF(s, PyUnicode_Substring(s, 0, invalid + 1)); + if (s != NULL) { + Py_SETREF(s, PyUnicode_AsUTF8String(s)); + } + if (s == NULL) { + tok->done = E_ERROR; + return 0; + } + tok->cur = (char *)tok->start + PyBytes_GET_SIZE(s); + } + Py_DECREF(s); + // PyUnicode_FromFormatV() does not support %X + char hex[9]; + snprintf(hex, sizeof(hex), "%04X", ch); + if (Py_UNICODE_ISPRINTABLE(ch)) { + syntaxerror(tok, "invalid character '%c' (U+%s)", ch, hex); + } + else { + syntaxerror(tok, "invalid non-printable character U+%s", hex); + } + return 0; + } + Py_DECREF(s); + return 1; } static int diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 1b79a33c814da1..45f08b707eb999 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -1603,9 +1603,6 @@ err_input(perrdetail *err) msg = 
"unexpected character after line continuation character"; break; - case E_IDENTIFIER: - msg = "invalid character in identifier"; - break; case E_BADSINGLE: msg = "multiple statements found while compiling a single statement"; break; From 7c6e97077525f0ad3cfa0971028313b9079449fd Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 12 May 2020 13:31:59 +0200 Subject: [PATCH 072/115] bpo-40602: Optimize _Py_hashtable for pointer keys (GH-20051) Optimize _Py_hashtable_get() and _Py_hashtable_get_entry() for pointer keys: * key_size == sizeof(void*) * hash_func == _Py_hashtable_hash_ptr * compare_func == _Py_hashtable_compare_direct Changes: * Add get_func and get_entry_func members to _Py_hashtable_t * Convert _Py_hashtable_get() and _Py_hashtable_get_entry() functions to static nline functions. * Add specialized get and get entry for pointer keys. --- Include/internal/pycore_hashtable.h | 40 ++++-- Python/hashtable.c | 207 +++++++++++++++++----------- 2 files changed, 153 insertions(+), 94 deletions(-) diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 585f76b51d7112..6e094e94376ad5 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -76,12 +76,17 @@ typedef struct { /* Forward declaration */ struct _Py_hashtable_t; +typedef struct _Py_hashtable_t _Py_hashtable_t; -typedef Py_uhash_t (*_Py_hashtable_hash_func) (struct _Py_hashtable_t *ht, +typedef Py_uhash_t (*_Py_hashtable_hash_func) (_Py_hashtable_t *ht, const void *pkey); -typedef int (*_Py_hashtable_compare_func) (struct _Py_hashtable_t *ht, +typedef int (*_Py_hashtable_compare_func) (_Py_hashtable_t *ht, const void *pkey, const _Py_hashtable_entry_t *he); +typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht, + const void *pkey); +typedef int (*_Py_hashtable_get_func) (_Py_hashtable_t *ht, + const void *pkey, void *data); typedef struct { /* allocate a memory block */ @@ -93,18 +98,19 @@ typedef 
struct { /* _Py_hashtable: table */ - -typedef struct _Py_hashtable_t { +struct _Py_hashtable_t { size_t num_buckets; size_t entries; /* Total number of entries in the table. */ _Py_slist_t *buckets; size_t key_size; size_t data_size; + _Py_hashtable_get_func get_func; + _Py_hashtable_get_entry_func get_entry_func; _Py_hashtable_hash_func hash_func; _Py_hashtable_compare_func compare_func; _Py_hashtable_allocator_t alloc; -} _Py_hashtable_t; +}; /* hash a pointer (void*) */ PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr( @@ -176,10 +182,12 @@ PyAPI_FUNC(int) _Py_hashtable_set( Don't call directly this function, but use _Py_HASHTABLE_GET_ENTRY() macro */ -PyAPI_FUNC(_Py_hashtable_entry_t*) _Py_hashtable_get_entry( - _Py_hashtable_t *ht, - size_t key_size, - const void *pkey); +static inline _Py_hashtable_entry_t * +_Py_hashtable_get_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey) +{ + assert(key_size == ht->key_size); + return ht->get_entry_func(ht, pkey); +} #define _Py_HASHTABLE_GET_ENTRY(TABLE, KEY) \ _Py_hashtable_get_entry(TABLE, sizeof(KEY), &(KEY)) @@ -189,12 +197,14 @@ PyAPI_FUNC(_Py_hashtable_entry_t*) _Py_hashtable_get_entry( exists, return 0 if the entry does not exist. 
Don't call directly this function, but use _Py_HASHTABLE_GET() macro */ -PyAPI_FUNC(int) _Py_hashtable_get( - _Py_hashtable_t *ht, - size_t key_size, - const void *pkey, - size_t data_size, - void *data); +static inline int +_Py_hashtable_get(_Py_hashtable_t *ht, size_t key_size, const void *pkey, + size_t data_size, void *data) +{ + assert(key_size == ht->key_size); + assert(data_size == ht->data_size); + return ht->get_func(ht, pkey, data); +} #define _Py_HASHTABLE_GET(TABLE, KEY, DATA) \ _Py_hashtable_get(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) diff --git a/Python/hashtable.c b/Python/hashtable.c index e9f02d8650e4f8..1548c2e4618c3e 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -108,7 +108,6 @@ Py_uhash_t _Py_hashtable_hash_ptr(struct _Py_hashtable_t *ht, const void *pkey) { void *key; - _Py_HASHTABLE_READ_KEY(ht, pkey, key); return (Py_uhash_t)_Py_HashPointer(key); } @@ -137,61 +136,6 @@ round_size(size_t s) } -_Py_hashtable_t * -_Py_hashtable_new_full(size_t key_size, size_t data_size, - size_t init_size, - _Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func, - _Py_hashtable_allocator_t *allocator) -{ - _Py_hashtable_t *ht; - size_t buckets_size; - _Py_hashtable_allocator_t alloc; - - if (allocator == NULL) { - alloc.malloc = PyMem_Malloc; - alloc.free = PyMem_Free; - } - else { - alloc = *allocator; - } - - ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); - if (ht == NULL) - return ht; - - ht->num_buckets = round_size(init_size); - ht->entries = 0; - ht->key_size = key_size; - ht->data_size = data_size; - - buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); - ht->buckets = alloc.malloc(buckets_size); - if (ht->buckets == NULL) { - alloc.free(ht); - return NULL; - } - memset(ht->buckets, 0, buckets_size); - - ht->hash_func = hash_func; - ht->compare_func = compare_func; - ht->alloc = alloc; - return ht; -} - - -_Py_hashtable_t * -_Py_hashtable_new(size_t key_size, size_t data_size, - 
_Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func) -{ - return _Py_hashtable_new_full(key_size, data_size, - HASHTABLE_MIN_SIZE, - hash_func, compare_func, - NULL); -} - - size_t _Py_hashtable_size(_Py_hashtable_t *ht) { @@ -251,23 +195,20 @@ _Py_hashtable_print_stats(_Py_hashtable_t *ht) _Py_hashtable_entry_t * -_Py_hashtable_get_entry(_Py_hashtable_t *ht, - size_t key_size, const void *pkey) +_Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *pkey) { - Py_uhash_t key_hash; - size_t index; - _Py_hashtable_entry_t *entry; - - assert(key_size == ht->key_size); - - key_hash = ht->hash_func(ht, pkey); - index = key_hash & (ht->num_buckets - 1); - - for (entry = TABLE_HEAD(ht, index); entry != NULL; entry = ENTRY_NEXT(entry)) { - if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) + Py_uhash_t key_hash = ht->hash_func(ht, pkey); + size_t index = key_hash & (ht->num_buckets - 1); + _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + while (1) { + if (entry == NULL) { + return NULL; + } + if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) { break; + } + entry = ENTRY_NEXT(entry); } - return entry; } @@ -324,7 +265,7 @@ _Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, /* Don't write the assertion on a single line because it is interesting to know the duplicated entry if the assertion failed. The entry can be read using a debugger. 
*/ - entry = _Py_hashtable_get_entry(ht, key_size, pkey); + entry = ht->get_entry_func(ht, pkey); assert(entry == NULL); #endif @@ -352,18 +293,62 @@ _Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, int -_Py_hashtable_get(_Py_hashtable_t *ht, size_t key_size,const void *pkey, - size_t data_size, void *data) +_Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *pkey, void *data) { - _Py_hashtable_entry_t *entry; - assert(data != NULL); + _Py_hashtable_entry_t *entry = ht->get_entry_func(ht, pkey); + if (entry != NULL) { + ENTRY_READ_PDATA(ht, entry, ht->data_size, data); + return 1; + } + else { + return 0; + } +} - entry = _Py_hashtable_get_entry(ht, key_size, pkey); - if (entry == NULL) + +// Specialized for: +// key_size == sizeof(void*) +// hash_func == _Py_hashtable_hash_ptr +// compare_func == _Py_hashtable_compare_direct +_Py_hashtable_entry_t * +_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *pkey) +{ + Py_uhash_t key_hash = _Py_hashtable_hash_ptr(ht, pkey); + size_t index = key_hash & (ht->num_buckets - 1); + _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + while (1) { + if (entry == NULL) { + return NULL; + } + if (entry->key_hash == key_hash) { + const void *pkey2 = _Py_HASHTABLE_ENTRY_PKEY(entry); + if (memcmp(pkey, pkey2, sizeof(void*)) == 0) { + break; + } + } + entry = ENTRY_NEXT(entry); + } + return entry; +} + + +// Specialized for: +// key_size == sizeof(void*) +// hash_func == _Py_hashtable_hash_ptr +// compare_func == _Py_hashtable_compare_direct +int +_Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *pkey, void *data) +{ + assert(data != NULL); + _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry_ptr(ht, pkey); + if (entry != NULL) { + ENTRY_READ_PDATA(ht, entry, ht->data_size, data); + return 1; + } + else { return 0; - ENTRY_READ_PDATA(ht, entry, data_size, data); - return 1; + } } @@ -454,6 +439,70 @@ hashtable_rehash(_Py_hashtable_t *ht) } +_Py_hashtable_t * 
+_Py_hashtable_new_full(size_t key_size, size_t data_size, + size_t init_size, + _Py_hashtable_hash_func hash_func, + _Py_hashtable_compare_func compare_func, + _Py_hashtable_allocator_t *allocator) +{ + _Py_hashtable_t *ht; + size_t buckets_size; + _Py_hashtable_allocator_t alloc; + + if (allocator == NULL) { + alloc.malloc = PyMem_Malloc; + alloc.free = PyMem_Free; + } + else { + alloc = *allocator; + } + + ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); + if (ht == NULL) + return ht; + + ht->num_buckets = round_size(init_size); + ht->entries = 0; + ht->key_size = key_size; + ht->data_size = data_size; + + buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); + ht->buckets = alloc.malloc(buckets_size); + if (ht->buckets == NULL) { + alloc.free(ht); + return NULL; + } + memset(ht->buckets, 0, buckets_size); + + ht->get_func = _Py_hashtable_get_generic; + ht->get_entry_func = _Py_hashtable_get_entry_generic; + ht->hash_func = hash_func; + ht->compare_func = compare_func; + ht->alloc = alloc; + if (ht->key_size == sizeof(void*) + && ht->hash_func == _Py_hashtable_hash_ptr + && ht->compare_func == _Py_hashtable_compare_direct) + { + ht->get_func = _Py_hashtable_get_ptr; + ht->get_entry_func = _Py_hashtable_get_entry_ptr; + } + return ht; +} + + +_Py_hashtable_t * +_Py_hashtable_new(size_t key_size, size_t data_size, + _Py_hashtable_hash_func hash_func, + _Py_hashtable_compare_func compare_func) +{ + return _Py_hashtable_new_full(key_size, data_size, + HASHTABLE_MIN_SIZE, + hash_func, compare_func, + NULL); +} + + void _Py_hashtable_clear(_Py_hashtable_t *ht) { From 5650e76f63a6f4ec55d00ec13f143d84a2efee39 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Tue, 12 May 2020 16:18:00 +0300 Subject: [PATCH 073/115] bpo-40596: Fix str.isidentifier() for non-canonicalized strings containing non-BMP characters on Windows. 
(GH-20053) --- Lib/test/test_unicode.py | 7 +++++ .../2020-05-11-20-53-52.bpo-40596.dwOH_X.rst | 2 ++ Objects/unicodeobject.c | 26 ++++++++++++++++--- 3 files changed, 31 insertions(+), 4 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 28398896467898..2ee4e64d635303 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -720,6 +720,13 @@ def test_isidentifier(self): self.assertFalse("©".isidentifier()) self.assertFalse("0".isidentifier()) + @support.cpython_only + def test_isidentifier_legacy(self): + import _testcapi + u = '𝖀𝖓𝖎𝖈𝖔𝖉𝖊' + self.assertTrue(u.isidentifier()) + self.assertTrue(_testcapi.unicode_legacy_string(u).isidentifier()) + def test_isprintable(self): self.assertTrue("".isprintable()) self.assertTrue(" ".isprintable()) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst new file mode 100644 index 00000000000000..1252db4dc9848d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst @@ -0,0 +1,2 @@ +Fixed :meth:`str.isidentifier` for non-canonicalized strings containing +non-BMP characters on Windows. 
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 276547ca48a5b2..826298c23a924c 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -12356,20 +12356,38 @@ PyUnicode_IsIdentifier(PyObject *self) return len && i == len; } else { - Py_ssize_t i, len = PyUnicode_GET_SIZE(self); + Py_ssize_t i = 0, len = PyUnicode_GET_SIZE(self); if (len == 0) { /* an empty string is not a valid identifier */ return 0; } const wchar_t *wstr = _PyUnicode_WSTR(self); - Py_UCS4 ch = wstr[0]; + Py_UCS4 ch = wstr[i++]; +#if SIZEOF_WCHAR_T == 2 + if (Py_UNICODE_IS_HIGH_SURROGATE(ch) + && i < len + && Py_UNICODE_IS_LOW_SURROGATE(wstr[i])) + { + ch = Py_UNICODE_JOIN_SURROGATES(ch, wstr[i]); + i++; + } +#endif if (!_PyUnicode_IsXidStart(ch) && ch != 0x5F /* LOW LINE */) { return 0; } - for (i = 1; i < len; i++) { - ch = wstr[i]; + while (i < len) { + ch = wstr[i++]; +#if SIZEOF_WCHAR_T == 2 + if (Py_UNICODE_IS_HIGH_SURROGATE(ch) + && i < len + && Py_UNICODE_IS_LOW_SURROGATE(wstr[i])) + { + ch = Py_UNICODE_JOIN_SURROGATES(ch, wstr[i]); + i++; + } +#endif if (!_PyUnicode_IsXidContinue(ch)) { return 0; } From 4c9ea093cd752a6687864674d34250653653f743 Mon Sep 17 00:00:00 2001 From: scoder Date: Tue, 12 May 2020 16:12:41 +0200 Subject: [PATCH 074/115] bpo-38787: Add PyCFunction_CheckExact() macro for exact type checks (GH-20024) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit … now that we allow subtypes of PyCFunction. Also add PyCMethod_CheckExact() and PyCMethod_Check() for checks against the PyCMethod subtype. 
--- Include/cpython/methodobject.h | 3 +++ Include/methodobject.h | 3 ++- .../next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst | 2 ++ Objects/abstract.c | 2 +- Python/ceval.c | 4 ++-- 5 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst diff --git a/Include/cpython/methodobject.h b/Include/cpython/methodobject.h index 2ac2cbf36aa796..7ecbfe3b5e2fe8 100644 --- a/Include/cpython/methodobject.h +++ b/Include/cpython/methodobject.h @@ -4,6 +4,9 @@ PyAPI_DATA(PyTypeObject) PyCMethod_Type; +#define PyCMethod_CheckExact(op) Py_IS_TYPE(op, &PyCMethod_Type) +#define PyCMethod_Check(op) PyObject_TypeCheck(op, &PyCMethod_Type) + /* Macros for direct access to these values. Type checks are *not* done, so use with care. */ #define PyCFunction_GET_FUNCTION(func) \ diff --git a/Include/methodobject.h b/Include/methodobject.h index 7c7362cded35b8..12e049b4043ba5 100644 --- a/Include/methodobject.h +++ b/Include/methodobject.h @@ -13,7 +13,8 @@ extern "C" { PyAPI_DATA(PyTypeObject) PyCFunction_Type; -#define PyCFunction_Check(op) (Py_IS_TYPE(op, &PyCFunction_Type) || (PyType_IsSubtype(Py_TYPE(op), &PyCFunction_Type))) +#define PyCFunction_CheckExact(op) Py_IS_TYPE(op, &PyCFunction_Type) +#define PyCFunction_Check(op) PyObject_TypeCheck(op, &PyCFunction_Type) typedef PyObject *(*PyCFunction)(PyObject *, PyObject *); typedef PyObject *(*_PyCFunctionFast) (PyObject *, PyObject *const *, Py_ssize_t); diff --git a/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst b/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst new file mode 100644 index 00000000000000..f80be666c1c200 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst @@ -0,0 +1,2 @@ +Add PyCFunction_CheckExact() macro for exact type checks now that we allow subtypes of PyCFunction, +as well as PyCMethod_CheckExact() and PyCMethod_Check() for the new PyCMethod subtype. 
diff --git a/Objects/abstract.c b/Objects/abstract.c index b014f79e8d0fba..5b85b014bd22e3 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -900,7 +900,7 @@ binary_op(PyObject *v, PyObject *w, const int op_slot, const char *op_name) Py_DECREF(result); if (op_slot == NB_SLOT(nb_rshift) && - PyCFunction_Check(v) && + PyCFunction_CheckExact(v) && strcmp(((PyCFunctionObject *)v)->m_ml->ml_name, "print") == 0) { PyErr_Format(PyExc_TypeError, diff --git a/Python/ceval.c b/Python/ceval.c index e54e344a5fd514..699ad86a365b18 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -5054,7 +5054,7 @@ trace_call_function(PyThreadState *tstate, PyObject *kwnames) { PyObject *x; - if (PyCFunction_Check(func)) { + if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) { C_TRACE(x, PyObject_Vectorcall(func, args, nargs, kwnames)); return x; } @@ -5115,7 +5115,7 @@ do_call_core(PyThreadState *tstate, PyObject *func, PyObject *callargs, PyObject { PyObject *result; - if (PyCFunction_Check(func)) { + if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) { C_TRACE(result, PyObject_Call(func, callargs, kwdict)); return result; } From f453221c8b80e0570066a9375337f208d50e6406 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 12 May 2020 18:46:20 +0200 Subject: [PATCH 075/115] bpo-40602: Add _Py_HashPointerRaw() function (GH-20056) Add a new _Py_HashPointerRaw() function which avoids replacing -1 with -2 to micro-optimize hash table using pointer keys: using _Py_hashtable_hash_ptr() hash function. 
--- Include/pyhash.h | 2 ++ Python/hashtable.c | 2 +- Python/pyhash.c | 14 ++++++++++---- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/Include/pyhash.h b/Include/pyhash.h index 2f398589cee7ef..4437b870332bde 100644 --- a/Include/pyhash.h +++ b/Include/pyhash.h @@ -9,6 +9,8 @@ extern "C" { #ifndef Py_LIMITED_API PyAPI_FUNC(Py_hash_t) _Py_HashDouble(double); PyAPI_FUNC(Py_hash_t) _Py_HashPointer(const void*); +// Similar to _Py_HashPointer(), but don't replace -1 with -2 +PyAPI_FUNC(Py_hash_t) _Py_HashPointerRaw(const void*); PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void*, Py_ssize_t); #endif diff --git a/Python/hashtable.c b/Python/hashtable.c index 1548c2e4618c3e..90fe34e6280161 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -109,7 +109,7 @@ _Py_hashtable_hash_ptr(struct _Py_hashtable_t *ht, const void *pkey) { void *key; _Py_HASHTABLE_READ_KEY(ht, pkey, key); - return (Py_uhash_t)_Py_HashPointer(key); + return (Py_uhash_t)_Py_HashPointerRaw(key); } diff --git a/Python/pyhash.c b/Python/pyhash.c index a6f42e71cf643c..3843079fbbce14 100644 --- a/Python/pyhash.c +++ b/Python/pyhash.c @@ -129,16 +129,22 @@ _Py_HashDouble(double v) } Py_hash_t -_Py_HashPointer(const void *p) +_Py_HashPointerRaw(const void *p) { - Py_hash_t x; size_t y = (size_t)p; /* bottom 3 or 4 bits are likely to be 0; rotate y by 4 to avoid excessive hash collisions for dicts and sets */ y = (y >> 4) | (y << (8 * SIZEOF_VOID_P - 4)); - x = (Py_hash_t)y; - if (x == -1) + return (Py_hash_t)y; +} + +Py_hash_t +_Py_HashPointer(const void *p) +{ + Py_hash_t x = _Py_HashPointerRaw(p); + if (x == -1) { x = -2; + } return x; } From d6b727e2c947240804b8e434b305ba2890122550 Mon Sep 17 00:00:00 2001 From: Steve Dower Date: Tue, 12 May 2020 23:32:32 +0100 Subject: [PATCH 076/115] bpo-40501: Replace ctypes code in uuid with native module (GH-19948) --- Lib/test/test_uuid.py | 27 +-- Lib/uuid.py | 180 +++--------------- .../2020-05-06-00-41-11.bpo-40501._61wv_.rst | 2 + 
Modules/_uuidmodule.c | 54 +++++- PCbuild/_uuid.vcxproj | 115 +++++++++++ PCbuild/_uuid.vcxproj.filters | 14 ++ PCbuild/pcbuild.proj | 2 +- PCbuild/pcbuild.sln | 38 +++- Tools/msi/lib/lib_files.wxs | 2 +- 9 files changed, 248 insertions(+), 186 deletions(-) create mode 100644 Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst create mode 100644 PCbuild/_uuid.vcxproj create mode 100644 PCbuild/_uuid.vcxproj.filters diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py index ac166ced38afbc..b1c92427dd270b 100644 --- a/Lib/test/test_uuid.py +++ b/Lib/test/test_uuid.py @@ -852,17 +852,6 @@ def test_netstat_getnode(self): node = self.uuid._netstat_getnode() self.check_node(node, 'netstat') - @unittest.skipUnless(os.name == 'nt', 'requires Windows') - def test_ipconfig_getnode(self): - node = self.uuid._ipconfig_getnode() - self.check_node(node, 'ipconfig') - - @unittest.skipUnless(importable('win32wnet'), 'requires win32wnet') - @unittest.skipUnless(importable('netbios'), 'requires netbios') - def test_netbios_getnode(self): - node = self.uuid._netbios_getnode() - self.check_node(node) - def test_random_getnode(self): node = self.uuid._random_getnode() # The multicast bit, i.e. 
the least significant bit of first octet, @@ -874,6 +863,13 @@ def test_random_getnode(self): node2 = self.uuid._random_getnode() self.assertNotEqual(node2, node, '%012x' % node) +class TestInternalsWithoutExtModule(BaseTestInternals, unittest.TestCase): + uuid = py_uuid + +@unittest.skipUnless(c_uuid, 'requires the C _uuid module') +class TestInternalsWithExtModule(BaseTestInternals, unittest.TestCase): + uuid = c_uuid + @unittest.skipUnless(os.name == 'posix', 'requires Posix') def test_unix_getnode(self): if not importable('_uuid') and not importable('ctypes'): @@ -885,19 +881,10 @@ def test_unix_getnode(self): self.check_node(node, 'unix') @unittest.skipUnless(os.name == 'nt', 'requires Windows') - @unittest.skipUnless(importable('ctypes'), 'requires ctypes') def test_windll_getnode(self): node = self.uuid._windll_getnode() self.check_node(node) -class TestInternalsWithoutExtModule(BaseTestInternals, unittest.TestCase): - uuid = py_uuid - -@unittest.skipUnless(c_uuid, 'requires the C _uuid module') -class TestInternalsWithExtModule(BaseTestInternals, unittest.TestCase): - uuid = c_uuid - - if __name__ == '__main__': unittest.main() diff --git a/Lib/uuid.py b/Lib/uuid.py index 2799c75ba6a1ad..9ddce813fc4692 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -555,178 +555,44 @@ def _netstat_getnode(): return _find_mac_under_heading('netstat', '-ian', b'Address') def _ipconfig_getnode(): - """Get the hardware address on Windows by running ipconfig.exe.""" - import os, re, subprocess - first_local_mac = None - dirs = ['', r'c:\windows\system32', r'c:\winnt\system32'] - try: - import ctypes - buffer = ctypes.create_string_buffer(300) - ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300) - dirs.insert(0, buffer.value.decode('mbcs')) - except: - pass - for dir in dirs: - try: - proc = subprocess.Popen([os.path.join(dir, 'ipconfig'), '/all'], - stdout=subprocess.PIPE, - encoding="oem") - except OSError: - continue - with proc: - for line in proc.stdout: - value = 
line.split(':')[-1].strip().lower() - if re.fullmatch('(?:[0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value): - mac = int(value.replace('-', ''), 16) - if _is_universal(mac): - return mac - first_local_mac = first_local_mac or mac - return first_local_mac or None + """[DEPRECATED] Get the hardware address on Windows.""" + # bpo-40501: UuidCreateSequential() is now the only supported approach + return _windll_getnode() def _netbios_getnode(): - """Get the hardware address on Windows using NetBIOS calls. - See http://support.microsoft.com/kb/118623 for details.""" - import win32wnet, netbios - first_local_mac = None - ncb = netbios.NCB() - ncb.Command = netbios.NCBENUM - ncb.Buffer = adapters = netbios.LANA_ENUM() - adapters._pack() - if win32wnet.Netbios(ncb) != 0: - return None - adapters._unpack() - for i in range(adapters.length): - ncb.Reset() - ncb.Command = netbios.NCBRESET - ncb.Lana_num = ord(adapters.lana[i]) - if win32wnet.Netbios(ncb) != 0: - continue - ncb.Reset() - ncb.Command = netbios.NCBASTAT - ncb.Lana_num = ord(adapters.lana[i]) - ncb.Callname = '*'.ljust(16) - ncb.Buffer = status = netbios.ADAPTER_STATUS() - if win32wnet.Netbios(ncb) != 0: - continue - status._unpack() - bytes = status.adapter_address[:6] - if len(bytes) != 6: - continue - mac = int.from_bytes(bytes, 'big') - if _is_universal(mac): - return mac - first_local_mac = first_local_mac or mac - return first_local_mac or None + """[DEPRECATED] Get the hardware address on Windows.""" + # bpo-40501: UuidCreateSequential() is now the only supported approach + return _windll_getnode() -_generate_time_safe = _UuidCreate = None -_has_uuid_generate_time_safe = None - # Import optional C extension at toplevel, to help disabling it when testing try: import _uuid + _generate_time_safe = getattr(_uuid, "generate_time_safe", None) + _UuidCreate = getattr(_uuid, "UuidCreate", None) + _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe except ImportError: _uuid = None + _generate_time_safe = 
None + _UuidCreate = None + _has_uuid_generate_time_safe = None def _load_system_functions(): - """ - Try to load platform-specific functions for generating uuids. - """ - global _generate_time_safe, _UuidCreate, _has_uuid_generate_time_safe - - if _has_uuid_generate_time_safe is not None: - return - - _has_uuid_generate_time_safe = False - - if sys.platform == "darwin" and int(os.uname().release.split('.')[0]) < 9: - # The uuid_generate_* functions are broken on MacOS X 10.5, as noted - # in issue #8621 the function generates the same sequence of values - # in the parent process and all children created using fork (unless - # those children use exec as well). - # - # Assume that the uuid_generate functions are broken from 10.5 onward, - # the test can be adjusted when a later version is fixed. - pass - elif _uuid is not None: - _generate_time_safe = _uuid.generate_time_safe - _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe - return - - try: - # If we couldn't find an extension module, try ctypes to find - # system routines for UUID generation. - # Thanks to Thomas Heller for ctypes and for his help with its use here. - import ctypes - import ctypes.util - - # The uuid_generate_* routines are provided by libuuid on at least - # Linux and FreeBSD, and provided by libc on Mac OS X. - _libnames = ['uuid'] - if not sys.platform.startswith('win'): - _libnames.append('c') - for libname in _libnames: - try: - lib = ctypes.CDLL(ctypes.util.find_library(libname)) - except Exception: # pragma: nocover - continue - # Try to find the safe variety first. 
- if hasattr(lib, 'uuid_generate_time_safe'): - _uuid_generate_time_safe = lib.uuid_generate_time_safe - # int uuid_generate_time_safe(uuid_t out); - def _generate_time_safe(): - _buffer = ctypes.create_string_buffer(16) - res = _uuid_generate_time_safe(_buffer) - return bytes(_buffer.raw), res - _has_uuid_generate_time_safe = True - break - - elif hasattr(lib, 'uuid_generate_time'): # pragma: nocover - _uuid_generate_time = lib.uuid_generate_time - # void uuid_generate_time(uuid_t out); - _uuid_generate_time.restype = None - def _generate_time_safe(): - _buffer = ctypes.create_string_buffer(16) - _uuid_generate_time(_buffer) - return bytes(_buffer.raw), None - break - - # On Windows prior to 2000, UuidCreate gives a UUID containing the - # hardware address. On Windows 2000 and later, UuidCreate makes a - # random UUID and UuidCreateSequential gives a UUID containing the - # hardware address. These routines are provided by the RPC runtime. - # NOTE: at least on Tim's WinXP Pro SP2 desktop box, while the last - # 6 bytes returned by UuidCreateSequential are fixed, they don't appear - # to bear any relationship to the MAC address of any network device - # on the box. 
- try: - lib = ctypes.windll.rpcrt4 - except: - lib = None - _UuidCreate = getattr(lib, 'UuidCreateSequential', - getattr(lib, 'UuidCreate', None)) - - except Exception as exc: - import warnings - warnings.warn(f"Could not find fallback ctypes uuid functions: {exc}", - ImportWarning) + """[DEPRECATED] Platform-specific functions loaded at import time""" def _unix_getnode(): - """Get the hardware address on Unix using the _uuid extension module - or ctypes.""" - _load_system_functions() - uuid_time, _ = _generate_time_safe() - return UUID(bytes=uuid_time).node + """Get the hardware address on Unix using the _uuid extension module.""" + if _generate_time_safe: + uuid_time, _ = _generate_time_safe() + return UUID(bytes=uuid_time).node def _windll_getnode(): - """Get the hardware address on Windows using ctypes.""" - import ctypes - _load_system_functions() - _buffer = ctypes.create_string_buffer(16) - if _UuidCreate(_buffer) == 0: - return UUID(bytes=bytes_(_buffer.raw)).node + """Get the hardware address on Windows using the _uuid extension module.""" + if _UuidCreate: + uuid_bytes = _UuidCreate() + return UUID(bytes_le=uuid_bytes).node def _random_getnode(): """Get a random node ID.""" @@ -755,7 +621,8 @@ def _random_getnode(): elif _DARWIN: _OS_GETTERS = [_ifconfig_getnode, _arp_getnode, _netstat_getnode] elif _WINDOWS: - _OS_GETTERS = [_netbios_getnode, _ipconfig_getnode] + # bpo-40201: _windll_getnode will always succeed, so these are not needed + _OS_GETTERS = [] elif _AIX: _OS_GETTERS = [_netstat_getnode] else: @@ -802,7 +669,6 @@ def uuid1(node=None, clock_seq=None): # When the system provides a version-1 UUID generator, use it (but don't # use UuidCreate here because its UUIDs don't conform to RFC 4122). 
- _load_system_functions() if _generate_time_safe is not None and node is clock_seq is None: uuid_time, safely_generated = _generate_time_safe() try: diff --git a/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst b/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst new file mode 100644 index 00000000000000..5ce22eb8a92eef --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst @@ -0,0 +1,2 @@ +:mod:`uuid` no longer uses :mod:`ctypes` to load :file:`libuuid` or +:file:`rpcrt4.dll` at runtime. diff --git a/Modules/_uuidmodule.c b/Modules/_uuidmodule.c index 3be6c848ad6457..3f33e22a055c6d 100644 --- a/Modules/_uuidmodule.c +++ b/Modules/_uuidmodule.c @@ -1,5 +1,5 @@ /* - * Python UUID module that wraps libuuid - + * Python UUID module that wraps libuuid or Windows rpcrt4.dll. * DCE compatible Universally Unique Identifier library. */ @@ -12,6 +12,12 @@ #include #endif +#ifdef MS_WINDOWS +#include +#endif + +#ifndef MS_WINDOWS + static PyObject * py_uuid_generate_time_safe(PyObject *Py_UNUSED(context), PyObject *Py_UNUSED(ignored)) @@ -31,17 +37,50 @@ py_uuid_generate_time_safe(PyObject *Py_UNUSED(context), return Py_BuildValue("y#i", buf, sizeof(uuid), (int) status); # else return Py_BuildValue("y#i", (const char *) &uuid, sizeof(uuid), (int) status); -# endif -#else +# endif /* HAVE_UUID_CREATE */ +#else /* HAVE_UUID_GENERATE_TIME_SAFE */ uuid_generate_time(uuid); return Py_BuildValue("y#O", (const char *) uuid, sizeof(uuid), Py_None); -#endif +#endif /* HAVE_UUID_GENERATE_TIME_SAFE */ } +#else /* MS_WINDOWS */ + +static PyObject * +py_UuidCreate(PyObject *Py_UNUSED(context), + PyObject *Py_UNUSED(ignored)) +{ + UUID uuid; + RPC_STATUS res; + + Py_BEGIN_ALLOW_THREADS + res = UuidCreateSequential(&uuid); + Py_END_ALLOW_THREADS + + switch (res) { + case RPC_S_OK: + case RPC_S_UUID_LOCAL_ONLY: + case RPC_S_UUID_NO_ADDRESS: + /* + All success codes, but the latter two indicate that the UUID is random + 
rather than based on the MAC address. If the OS can't figure this out, + neither can we, so we'll take it anyway. + */ + return Py_BuildValue("y#", (const char *)&uuid, sizeof(uuid)); + } + PyErr_SetFromWindowsErr(res); + return NULL; +} + +#endif /* MS_WINDOWS */ + + static int uuid_exec(PyObject *module) { assert(sizeof(uuid_t) == 16); -#ifdef HAVE_UUID_GENERATE_TIME_SAFE +#if defined(MS_WINDOWS) + int has_uuid_generate_time_safe = 0; +#elif defined(HAVE_UUID_GENERATE_TIME_SAFE) int has_uuid_generate_time_safe = 1; #else int has_uuid_generate_time_safe = 0; @@ -54,7 +93,12 @@ uuid_exec(PyObject *module) { } static PyMethodDef uuid_methods[] = { +#if defined(HAVE_UUID_UUID_H) || defined(HAVE_UUID_H) {"generate_time_safe", py_uuid_generate_time_safe, METH_NOARGS, NULL}, +#endif +#if defined(MS_WINDOWS) + {"UuidCreate", py_UuidCreate, METH_NOARGS, NULL}, +#endif {NULL, NULL, 0, NULL} /* sentinel */ }; diff --git a/PCbuild/_uuid.vcxproj b/PCbuild/_uuid.vcxproj new file mode 100644 index 00000000000000..2437b7eb2d9399 --- /dev/null +++ b/PCbuild/_uuid.vcxproj @@ -0,0 +1,115 @@ + + + + + Debug + ARM + + + Debug + ARM64 + + + Debug + Win32 + + + Debug + x64 + + + PGInstrument + ARM + + + PGInstrument + ARM64 + + + PGInstrument + Win32 + + + PGInstrument + x64 + + + PGUpdate + ARM + + + PGUpdate + ARM64 + + + PGUpdate + Win32 + + + PGUpdate + x64 + + + Release + ARM + + + Release + ARM64 + + + Release + Win32 + + + Release + x64 + + + + {CB435430-EBB1-478B-8F4E-C256F6838F55} + _uuid + Win32Proj + false + + + + + DynamicLibrary + NotSet + + + + .pyd + + + + + + + + + + <_ProjectFileVersion>10.0.30319.1 + + + + rpcrt4.lib;%(AdditionalDependencies) + + + + + + + + + + + {cf7ac3d1-e2df-41d2-bea6-1e2556cdea26} + false + + + + + + \ No newline at end of file diff --git a/PCbuild/_uuid.vcxproj.filters b/PCbuild/_uuid.vcxproj.filters new file mode 100644 index 00000000000000..17949292314345 --- /dev/null +++ b/PCbuild/_uuid.vcxproj.filters @@ -0,0 +1,14 @@ + + + + + 
{4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + + + Source Files + + + \ No newline at end of file diff --git a/PCbuild/pcbuild.proj b/PCbuild/pcbuild.proj index 22a9eed18d42bb..9c4d352b434488 100644 --- a/PCbuild/pcbuild.proj +++ b/PCbuild/pcbuild.proj @@ -51,7 +51,7 @@ - + diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index 6dc0139bc42af4..6d4c9506e5ec1a 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -1,6 +1,6 @@ Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 15 -VisualStudioVersion = 15.0.27130.2024 +# Visual Studio Version 16 +VisualStudioVersion = 16.0.30028.174 MinimumVisualStudioVersion = 10.0.40219.1 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{553EC33E-9816-4996-A660-5D6186A0B0B3}" ProjectSection(SolutionItems) = preProject @@ -103,6 +103,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "venvwlauncher", "venvwlaunc EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythonw_uwp", "pythonw_uwp.vcxproj", "{AB603547-1E2A-45B3-9E09-B04596006393}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_uuid", "_uuid.vcxproj", "{CB435430-EBB1-478B-8F4E-C256F6838F55}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|ARM = Debug|ARM @@ -1440,6 +1442,38 @@ Global {AB603547-1E2A-45B3-9E09-B04596006393}.Release|Win32.Build.0 = Release|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.Release|x64.ActiveCfg = Release|x64 {AB603547-1E2A-45B3-9E09-B04596006393}.Release|x64.Build.0 = Release|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM.ActiveCfg = Debug|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM.Build.0 = Debug|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM64.ActiveCfg = Debug|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM64.Build.0 = Debug|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|Win32.ActiveCfg = Debug|Win32 + 
{CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|Win32.Build.0 = Debug|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|x64.ActiveCfg = Debug|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|x64.Build.0 = Debug|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM.ActiveCfg = PGInstrument|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM.Build.0 = PGInstrument|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM64.ActiveCfg = PGInstrument|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM64.Build.0 = PGInstrument|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|Win32.ActiveCfg = PGInstrument|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|Win32.Build.0 = PGInstrument|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|x64.ActiveCfg = PGInstrument|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|x64.Build.0 = PGInstrument|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM.ActiveCfg = PGUpdate|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM.Build.0 = PGUpdate|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM64.ActiveCfg = PGUpdate|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM64.Build.0 = PGUpdate|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|Win32.ActiveCfg = PGUpdate|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|Win32.Build.0 = PGUpdate|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|x64.ActiveCfg = PGUpdate|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|x64.Build.0 = PGUpdate|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM.ActiveCfg = Release|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM.Build.0 = Release|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM64.ActiveCfg = Release|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM64.Build.0 = Release|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|Win32.ActiveCfg = Release|Win32 + 
{CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|Win32.Build.0 = Release|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|x64.ActiveCfg = Release|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|x64.Build.0 = Release|x64 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/Tools/msi/lib/lib_files.wxs b/Tools/msi/lib/lib_files.wxs index b462372512f6de..95541599b9bb29 100644 --- a/Tools/msi/lib/lib_files.wxs +++ b/Tools/msi/lib/lib_files.wxs @@ -1,6 +1,6 @@  - + From 3d54211e6eddc2f2586b9a20543754947c7ad325 Mon Sep 17 00:00:00 2001 From: Allen Guo Date: Tue, 12 May 2020 18:54:18 -0400 Subject: [PATCH 077/115] Fix Wikipedia link (GH-20031) --- Doc/library/functools.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 204e66ae5ac407..a44eb85b27dbab 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -158,11 +158,11 @@ The :mod:`functools` module defines the following functions: bypassing the cache, or for rewrapping the function with a different cache. An `LRU (least recently used) cache - `_ works - best when the most recent calls are the best predictors of upcoming calls (for - example, the most popular articles on a news server tend to change each day). - The cache's size limit assures that the cache does not grow without bound on - long-running processes such as web servers. + `_ + works best when the most recent calls are the best predictors of upcoming + calls (for example, the most popular articles on a news server tend to + change each day). The cache's size limit assures that the cache does not + grow without bound on long-running processes such as web servers. In general, the LRU cache should only be used when you want to reuse previously computed values. 
Accordingly, it doesn't make sense to cache From 9e2ca1742076169089b818d0883688a2ddd9964a Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 13 May 2020 01:36:47 +0200 Subject: [PATCH 078/115] bpo-40609: Rewrite how _tracemalloc handles domains (GH-20059) Rewrite how the _tracemalloc module stores traces of other domains. Rather than storing the domain inside the key, it now uses a new hash table with the domain as the key, and the data is a per-domain traces hash table. * Add tracemalloc_domain hash table. * Remove _Py_tracemalloc_config.use_domain. * Remove pointer_t and related functions. --- Include/internal/pycore_pymem.h | 7 +- Modules/_tracemalloc.c | 327 +++++++++++++++++--------------- 2 files changed, 174 insertions(+), 160 deletions(-) diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index 18203e30f5cfe3..3d925e2250d252 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -88,17 +88,12 @@ struct _PyTraceMalloc_Config { /* limit of the number of frames in a traceback, 1 by default. Variable protected by the GIL. */ int max_nframe; - - /* use domain in trace key? - Variable protected by the GIL. */ - int use_domain; }; #define _PyTraceMalloc_Config_INIT \ {.initialized = TRACEMALLOC_NOT_INITIALIZED, \ .tracing = 0, \ - .max_nframe = 1, \ - .use_domain = 0} + .max_nframe = 1} PyAPI_DATA(struct _PyTraceMalloc_Config) _Py_tracemalloc_config; diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index f22338166d0dc1..7e31abe05fb6b8 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -47,16 +47,6 @@ static PyThread_type_lock tables_lock; #define DEFAULT_DOMAIN 0 -/* Pack the frame_t structure to reduce the memory footprint. */ -typedef struct -#ifdef __GNUC__ -__attribute__((packed)) -#endif -{ - uintptr_t ptr; - unsigned int domain; -} pointer_t; - /* Pack the frame_t structure to reduce the memory footprint on 64-bit architectures: 12 bytes instead of 16. 
*/ typedef struct @@ -133,6 +123,10 @@ static _Py_hashtable_t *tracemalloc_tracebacks = NULL; Protected by TABLES_LOCK(). */ static _Py_hashtable_t *tracemalloc_traces = NULL; +/* domain (unsigned int) => traces (_Py_hashtable_t). + Protected by TABLES_LOCK(). */ +static _Py_hashtable_t *tracemalloc_domains = NULL; + #ifdef TRACE_DEBUG static void @@ -235,32 +229,11 @@ hashtable_compare_unicode(_Py_hashtable_t *ht, const void *pkey, static Py_uhash_t -hashtable_hash_pointer_t(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_uint(_Py_hashtable_t *ht, const void *pkey) { - pointer_t ptr; - Py_uhash_t hash; - - _Py_HASHTABLE_READ_KEY(ht, pkey, ptr); - - hash = (Py_uhash_t)_Py_HashPointer((void*)ptr.ptr); - hash ^= ptr.domain; - return hash; -} - - -static int -hashtable_compare_pointer_t(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) -{ - pointer_t ptr1, ptr2; - - _Py_HASHTABLE_READ_KEY(ht, pkey, ptr1); - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, ptr2); - - /* compare pointer before domain, because pointer is more likely to be - different */ - return (ptr1.ptr == ptr2.ptr && ptr1.domain == ptr2.domain); - + unsigned int key; + _Py_HASHTABLE_READ_KEY(ht, pkey, key); + return (Py_uhash_t)key; } @@ -501,77 +474,74 @@ traceback_new(void) } -static int -tracemalloc_use_domain_cb(_Py_hashtable_t *old_traces, - _Py_hashtable_entry_t *entry, void *user_data) +static _Py_hashtable_t* +tracemalloc_create_traces_table(void) { - uintptr_t ptr; - pointer_t key; - _Py_hashtable_t *new_traces = (_Py_hashtable_t *)user_data; - const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(old_traces, entry); + return hashtable_new(sizeof(uintptr_t), + sizeof(trace_t), + _Py_hashtable_hash_ptr, + _Py_hashtable_compare_direct); +} - _Py_HASHTABLE_ENTRY_READ_KEY(old_traces, entry, ptr); - key.ptr = ptr; - key.domain = DEFAULT_DOMAIN; - return _Py_hashtable_set(new_traces, - sizeof(key), &key, - old_traces->data_size, pdata); +static _Py_hashtable_t* 
+tracemalloc_create_domains_table(void) +{ + return hashtable_new(sizeof(unsigned int), + sizeof(_Py_hashtable_t *), + hashtable_hash_uint, + _Py_hashtable_compare_direct); } -/* Convert tracemalloc_traces from compact key (uintptr_t) to pointer_t key. - * Return 0 on success, -1 on error. */ static int -tracemalloc_use_domain(void) +tracemalloc_destroy_domains_cb(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *user_data) { - _Py_hashtable_t *new_traces = NULL; - - assert(!_Py_tracemalloc_config.use_domain); - - new_traces = hashtable_new(sizeof(pointer_t), - sizeof(trace_t), - hashtable_hash_pointer_t, - hashtable_compare_pointer_t); - if (new_traces == NULL) { - return -1; - } + _Py_hashtable_t *traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + _Py_hashtable_destroy(traces); + return 0; +} - if (_Py_hashtable_foreach(tracemalloc_traces, tracemalloc_use_domain_cb, - new_traces) < 0) - { - _Py_hashtable_destroy(new_traces); - return -1; - } - _Py_hashtable_destroy(tracemalloc_traces); - tracemalloc_traces = new_traces; +static void +tracemalloc_destroy_domains(_Py_hashtable_t *domains) +{ + _Py_hashtable_foreach(domains, tracemalloc_destroy_domains_cb, NULL); + _Py_hashtable_destroy(domains); +} - _Py_tracemalloc_config.use_domain = 1; - return 0; +static _Py_hashtable_t* +tracemalloc_get_traces_table(unsigned int domain) +{ + if (domain == DEFAULT_DOMAIN) { + return tracemalloc_traces; + } + else { + _Py_hashtable_t *traces = NULL; + (void)_Py_HASHTABLE_GET(tracemalloc_domains, domain, traces); + return traces; + } } static void tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) { - trace_t trace; - int removed; - assert(_Py_tracemalloc_config.tracing); - if (_Py_tracemalloc_config.use_domain) { - pointer_t key = {ptr, domain}; - removed = _Py_HASHTABLE_POP(tracemalloc_traces, key, trace); - } - else { - removed = _Py_HASHTABLE_POP(tracemalloc_traces, ptr, trace); - } - if (!removed) { + _Py_hashtable_t *traces = 
tracemalloc_get_traces_table(domain); + if (!traces) { return; } + trace_t trace; + if (!_Py_HASHTABLE_POP(traces, ptr, trace)) { + return; + } assert(tracemalloc_traced_memory >= trace.size); tracemalloc_traced_memory -= trace.size; } @@ -584,54 +554,43 @@ static int tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, size_t size) { - pointer_t key = {ptr, domain}; - traceback_t *traceback; - trace_t trace; - _Py_hashtable_entry_t* entry; - int res; - assert(_Py_tracemalloc_config.tracing); - traceback = traceback_new(); + traceback_t *traceback = traceback_new(); if (traceback == NULL) { return -1; } - if (!_Py_tracemalloc_config.use_domain && domain != DEFAULT_DOMAIN) { - /* first trace using a non-zero domain whereas traces use compact - (uintptr_t) keys: switch to pointer_t keys. */ - if (tracemalloc_use_domain() < 0) { + _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); + if (traces == NULL) { + traces = tracemalloc_create_traces_table(); + if (traces == NULL) { return -1; } - } - if (_Py_tracemalloc_config.use_domain) { - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, key); - } - else { - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); + if (_Py_HASHTABLE_SET(tracemalloc_domains, domain, traces) < 0) { + _Py_hashtable_destroy(traces); + return -1; + } } + _Py_hashtable_entry_t* entry = _Py_HASHTABLE_GET_ENTRY(traces, ptr); + trace_t trace; if (entry != NULL) { /* the memory block is already tracked */ - _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace); + _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); assert(tracemalloc_traced_memory >= trace.size); tracemalloc_traced_memory -= trace.size; trace.size = size; trace.traceback = traceback; - _Py_HASHTABLE_ENTRY_WRITE_DATA(tracemalloc_traces, entry, trace); + _Py_HASHTABLE_ENTRY_WRITE_DATA(traces, entry, trace); } else { trace.size = size; trace.traceback = traceback; - if (_Py_tracemalloc_config.use_domain) { - res = _Py_HASHTABLE_SET(tracemalloc_traces, 
key, trace); - } - else { - res = _Py_HASHTABLE_SET(tracemalloc_traces, ptr, trace); - } + int res = _Py_HASHTABLE_SET(traces, ptr, trace); if (res != 0) { return res; } @@ -639,8 +598,9 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, assert(tracemalloc_traced_memory <= SIZE_MAX - size); tracemalloc_traced_memory += size; - if (tracemalloc_traced_memory > tracemalloc_peak_traced_memory) + if (tracemalloc_traced_memory > tracemalloc_peak_traced_memory) { tracemalloc_peak_traced_memory = tracemalloc_traced_memory; + } return 0; } @@ -691,7 +651,7 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size) TABLES_LOCK(); /* tracemalloc_add_trace() updates the trace if there is already - a trace at address (domain, ptr2) */ + a trace at address ptr2 */ if (ptr2 != ptr) { REMOVE_TRACE(ptr); } @@ -928,6 +888,7 @@ tracemalloc_clear_traces(void) TABLES_LOCK(); _Py_hashtable_clear(tracemalloc_traces); + _Py_hashtable_clear(tracemalloc_domains); tracemalloc_traced_memory = 0; tracemalloc_peak_traced_memory = 0; TABLES_UNLOCK(); @@ -983,21 +944,11 @@ tracemalloc_init(void) hashtable_hash_traceback, hashtable_compare_traceback); - if (_Py_tracemalloc_config.use_domain) { - tracemalloc_traces = hashtable_new(sizeof(pointer_t), - sizeof(trace_t), - hashtable_hash_pointer_t, - hashtable_compare_pointer_t); - } - else { - tracemalloc_traces = hashtable_new(sizeof(uintptr_t), - sizeof(trace_t), - _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); - } + tracemalloc_traces = tracemalloc_create_traces_table(); + tracemalloc_domains = tracemalloc_create_domains_table(); if (tracemalloc_filenames == NULL || tracemalloc_tracebacks == NULL - || tracemalloc_traces == NULL) { + || tracemalloc_traces == NULL || tracemalloc_domains == NULL) { PyErr_NoMemory(); return -1; } @@ -1029,9 +980,10 @@ tracemalloc_deinit(void) tracemalloc_stop(); /* destroy hash tables */ + tracemalloc_destroy_domains(tracemalloc_domains); + _Py_hashtable_destroy(tracemalloc_traces); 
_Py_hashtable_destroy(tracemalloc_tracebacks); _Py_hashtable_destroy(tracemalloc_filenames); - _Py_hashtable_destroy(tracemalloc_traces); #if defined(TRACE_RAW_MALLOC) if (tables_lock != NULL) { @@ -1279,31 +1231,45 @@ trace_to_pyobject(unsigned int domain, trace_t *trace, typedef struct { _Py_hashtable_t *traces; + _Py_hashtable_t *domains; _Py_hashtable_t *tracebacks; PyObject *list; + unsigned int domain; } get_traces_t; +static int +tracemalloc_get_traces_copy_domain(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *user_data) +{ + get_traces_t *get_traces = user_data; + + unsigned int domain; + _Py_HASHTABLE_ENTRY_READ_KEY(domains, entry, domain); + _Py_hashtable_t *traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + + _Py_hashtable_t *traces2 = _Py_hashtable_copy(traces); + if (_Py_HASHTABLE_SET(get_traces->domains, domain, traces2) < 0) { + _Py_hashtable_destroy(traces2); + return -1; + } + return 0; +} + + static int tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entry, void *user_data) { get_traces_t *get_traces = user_data; - unsigned int domain; trace_t trace; PyObject *tracemalloc_obj; int res; - if (_Py_tracemalloc_config.use_domain) { - pointer_t key; - _Py_HASHTABLE_ENTRY_READ_KEY(traces, entry, key); - domain = key.domain; - } - else { - domain = DEFAULT_DOMAIN; - } _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); - tracemalloc_obj = trace_to_pyobject(domain, &trace, get_traces->tracebacks); + tracemalloc_obj = trace_to_pyobject(get_traces->domain, &trace, get_traces->tracebacks); if (tracemalloc_obj == NULL) return 1; @@ -1316,6 +1282,25 @@ tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entr } +static int +tracemalloc_get_traces_domain(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *user_data) +{ + get_traces_t *get_traces = user_data; + + unsigned int domain; + _Py_HASHTABLE_ENTRY_READ_KEY(domains, entry, domain); + _Py_hashtable_t 
*traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + + get_traces->domain = domain; + return _Py_hashtable_foreach(traces, + tracemalloc_get_traces_fill, + get_traces); +} + + static int tracemalloc_pyobject_decref_cb(_Py_hashtable_t *tracebacks, _Py_hashtable_entry_t *entry, @@ -1345,9 +1330,9 @@ _tracemalloc__get_traces_impl(PyObject *module) /*[clinic end generated code: output=e9929876ced4b5cc input=6c7d2230b24255aa]*/ { get_traces_t get_traces; - int err; - + get_traces.domain = DEFAULT_DOMAIN; get_traces.traces = NULL; + get_traces.domains = NULL; get_traces.tracebacks = NULL; get_traces.list = PyList_New(0); if (get_traces.list == NULL) @@ -1363,28 +1348,51 @@ _tracemalloc__get_traces_impl(PyObject *module) _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct); if (get_traces.tracebacks == NULL) { - PyErr_NoMemory(); - goto error; + goto no_memory; } + get_traces.domains = tracemalloc_create_domains_table(); + if (get_traces.domains == NULL) { + goto no_memory; + } + + int err; + + // Copy all traces so tracemalloc_get_traces_fill() doesn't have to disable + // temporarily tracemalloc which would impact other threads and so would + // miss allocations while get_traces() is called. 
TABLES_LOCK(); get_traces.traces = _Py_hashtable_copy(tracemalloc_traces); + err = _Py_hashtable_foreach(tracemalloc_domains, + tracemalloc_get_traces_copy_domain, + &get_traces); TABLES_UNLOCK(); if (get_traces.traces == NULL) { - PyErr_NoMemory(); - goto error; + goto no_memory; + } + if (err) { + goto no_memory; } + // Convert traces to a list of tuples set_reentrant(1); err = _Py_hashtable_foreach(get_traces.traces, tracemalloc_get_traces_fill, &get_traces); + if (!err) { + err = _Py_hashtable_foreach(get_traces.domains, + tracemalloc_get_traces_domain, &get_traces); + } set_reentrant(0); - if (err) + if (err) { goto error; + } goto finally; +no_memory: + PyErr_NoMemory(); + error: Py_CLEAR(get_traces.list); @@ -1397,6 +1405,9 @@ _tracemalloc__get_traces_impl(PyObject *module) if (get_traces.traces != NULL) { _Py_hashtable_destroy(get_traces.traces); } + if (get_traces.domains != NULL) { + tracemalloc_destroy_domains(get_traces.domains); + } return get_traces.list; } @@ -1412,12 +1423,12 @@ tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) return NULL; TABLES_LOCK(); - if (_Py_tracemalloc_config.use_domain) { - pointer_t key = {ptr, domain}; - found = _Py_HASHTABLE_GET(tracemalloc_traces, key, trace); + _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); + if (traces) { + found = _Py_HASHTABLE_GET(traces, ptr, trace); } else { - found = _Py_HASHTABLE_GET(tracemalloc_traces, ptr, trace); + found = 0; } TABLES_UNLOCK(); @@ -1564,6 +1575,19 @@ _tracemalloc_get_traceback_limit_impl(PyObject *module) } +static int +tracemalloc_get_tracemalloc_memory_cb(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *user_data) +{ + _Py_hashtable_t *traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + + size_t *size = (size_t*)user_data; + *size += _Py_hashtable_size(traces); + return 0; +} + /*[clinic input] _tracemalloc.get_tracemalloc_memory @@ -1584,6 +1608,8 @@ _tracemalloc_get_tracemalloc_memory_impl(PyObject *module) 
TABLES_LOCK(); size += _Py_hashtable_size(tracemalloc_traces); + _Py_hashtable_foreach(tracemalloc_domains, + tracemalloc_get_tracemalloc_memory_cb, &size); TABLES_UNLOCK(); return PyLong_FromSize_t(size); @@ -1741,18 +1767,11 @@ _PyTraceMalloc_NewReference(PyObject *op) ptr = (uintptr_t)op; } - _Py_hashtable_entry_t* entry; int res = -1; TABLES_LOCK(); - if (_Py_tracemalloc_config.use_domain) { - pointer_t key = {ptr, DEFAULT_DOMAIN}; - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, key); - } - else { - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); - } - + _Py_hashtable_entry_t* entry; + entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); if (entry != NULL) { /* update the traceback of the memory block */ traceback_t *traceback = traceback_new(); From f9b3b582b86b9cce8d69ec7d03d716ec81c8264a Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 13 May 2020 02:26:02 +0200 Subject: [PATCH 079/115] bpo-40609: Remove _Py_hashtable_t.key_size (GH-20060) Rewrite _Py_hashtable_t type to always store the key as a "const void *" pointer. Add an explicit "key" member to _Py_hashtable_entry_t. Remove _Py_hashtable_t.key_size member. hash and compare functions drop their hash table parameter, and their 'key' parameter type becomes "const void *". 
--- Include/internal/pycore_hashtable.h | 74 +++++------------- Modules/_tracemalloc.c | 117 ++++++++++++---------------- Python/hashtable.c | 111 +++++++++++--------------- Python/marshal.c | 6 +- 4 files changed, 120 insertions(+), 188 deletions(-) diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 6e094e94376ad5..965a4e7f2b4587 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -30,32 +30,13 @@ typedef struct { _Py_slist_item_t _Py_slist_item; Py_uhash_t key_hash; - - /* key (key_size bytes) and then data (data_size bytes) follows */ + void *key; + /* data (data_size bytes) follows */ } _Py_hashtable_entry_t; -#define _Py_HASHTABLE_ENTRY_PKEY(ENTRY) \ - ((const void *)((char *)(ENTRY) \ - + sizeof(_Py_hashtable_entry_t))) - #define _Py_HASHTABLE_ENTRY_PDATA(TABLE, ENTRY) \ ((const void *)((char *)(ENTRY) \ - + sizeof(_Py_hashtable_entry_t) \ - + (TABLE)->key_size)) - -/* Get a key value from pkey: use memcpy() rather than a pointer dereference - to avoid memory alignment issues. 
*/ -#define _Py_HASHTABLE_READ_KEY(TABLE, PKEY, DST_KEY) \ - do { \ - assert(sizeof(DST_KEY) == (TABLE)->key_size); \ - memcpy(&(DST_KEY), (PKEY), sizeof(DST_KEY)); \ - } while (0) - -#define _Py_HASHTABLE_ENTRY_READ_KEY(TABLE, ENTRY, KEY) \ - do { \ - assert(sizeof(KEY) == (TABLE)->key_size); \ - memcpy(&(KEY), _Py_HASHTABLE_ENTRY_PKEY(ENTRY), sizeof(KEY)); \ - } while (0) + + sizeof(_Py_hashtable_entry_t))) #define _Py_HASHTABLE_ENTRY_READ_DATA(TABLE, ENTRY, DATA) \ do { \ @@ -78,15 +59,12 @@ typedef struct { struct _Py_hashtable_t; typedef struct _Py_hashtable_t _Py_hashtable_t; -typedef Py_uhash_t (*_Py_hashtable_hash_func) (_Py_hashtable_t *ht, - const void *pkey); -typedef int (*_Py_hashtable_compare_func) (_Py_hashtable_t *ht, - const void *pkey, - const _Py_hashtable_entry_t *he); +typedef Py_uhash_t (*_Py_hashtable_hash_func) (const void *key); +typedef int (*_Py_hashtable_compare_func) (const void *key1, const void *key2); typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht, - const void *pkey); + const void *key); typedef int (*_Py_hashtable_get_func) (_Py_hashtable_t *ht, - const void *pkey, void *data); + const void *key, void *data); typedef struct { /* allocate a memory block */ @@ -102,7 +80,6 @@ struct _Py_hashtable_t { size_t num_buckets; size_t entries; /* Total number of entries in the table. 
*/ _Py_slist_t *buckets; - size_t key_size; size_t data_size; _Py_hashtable_get_func get_func; @@ -113,24 +90,19 @@ struct _Py_hashtable_t { }; /* hash a pointer (void*) */ -PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr( - struct _Py_hashtable_t *ht, - const void *pkey); +PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr(const void *key); /* comparison using memcmp() */ PyAPI_FUNC(int) _Py_hashtable_compare_direct( - _Py_hashtable_t *ht, - const void *pkey, - const _Py_hashtable_entry_t *entry); + const void *key1, + const void *key2); PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new( - size_t key_size, size_t data_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func); PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full( - size_t key_size, size_t data_size, size_t init_size, _Py_hashtable_hash_func hash_func, @@ -165,16 +137,15 @@ PyAPI_FUNC(size_t) _Py_hashtable_size(_Py_hashtable_t *ht); but use _Py_HASHTABLE_SET() and _Py_HASHTABLE_SET_NODATA() macros */ PyAPI_FUNC(int) _Py_hashtable_set( _Py_hashtable_t *ht, - size_t key_size, - const void *pkey, + const void *key, size_t data_size, const void *data); #define _Py_HASHTABLE_SET(TABLE, KEY, DATA) \ - _Py_hashtable_set(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) + _Py_hashtable_set(TABLE, (KEY), sizeof(DATA), &(DATA)) #define _Py_HASHTABLE_SET_NODATA(TABLE, KEY) \ - _Py_hashtable_set(TABLE, sizeof(KEY), &(KEY), 0, NULL) + _Py_hashtable_set(TABLE, (KEY), 0, NULL) /* Get an entry. 
@@ -183,14 +154,13 @@ PyAPI_FUNC(int) _Py_hashtable_set( Don't call directly this function, but use _Py_HASHTABLE_GET_ENTRY() macro */ static inline _Py_hashtable_entry_t * -_Py_hashtable_get_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey) +_Py_hashtable_get_entry(_Py_hashtable_t *ht, const void *key) { - assert(key_size == ht->key_size); - return ht->get_entry_func(ht, pkey); + return ht->get_entry_func(ht, key); } #define _Py_HASHTABLE_GET_ENTRY(TABLE, KEY) \ - _Py_hashtable_get_entry(TABLE, sizeof(KEY), &(KEY)) + _Py_hashtable_get_entry(TABLE, (const void *)(KEY)) /* Get data from an entry. Copy entry data into data and return 1 if the entry @@ -198,28 +168,26 @@ _Py_hashtable_get_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey) Don't call directly this function, but use _Py_HASHTABLE_GET() macro */ static inline int -_Py_hashtable_get(_Py_hashtable_t *ht, size_t key_size, const void *pkey, +_Py_hashtable_get(_Py_hashtable_t *ht, const void *key, size_t data_size, void *data) { - assert(key_size == ht->key_size); assert(data_size == ht->data_size); - return ht->get_func(ht, pkey, data); + return ht->get_func(ht, key, data); } #define _Py_HASHTABLE_GET(TABLE, KEY, DATA) \ - _Py_hashtable_get(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) + _Py_hashtable_get(TABLE, (KEY), sizeof(DATA), &(DATA)) /* Don't call directly this function, but use _Py_HASHTABLE_POP() macro */ PyAPI_FUNC(int) _Py_hashtable_pop( _Py_hashtable_t *ht, - size_t key_size, - const void *pkey, + const void *key, size_t data_size, void *data); #define _Py_HASHTABLE_POP(TABLE, KEY, DATA) \ - _Py_hashtable_pop(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) + _Py_hashtable_pop(TABLE, (KEY), sizeof(DATA), &(DATA)) #ifdef __cplusplus diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 7e31abe05fb6b8..050fe03bba8eca 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -23,6 +23,9 @@ static void raw_free(void *ptr); # define TRACE_DEBUG 
#endif +#define TO_PTR(key) ((const void *)(uintptr_t)key) +#define FROM_PTR(key) ((uintptr_t)key) + /* Protected by the GIL */ static struct { PyMemAllocatorEx mem; @@ -203,47 +206,42 @@ set_reentrant(int reentrant) static Py_uhash_t -hashtable_hash_pyobject(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_pyobject(const void *key) { - PyObject *obj; - - _Py_HASHTABLE_READ_KEY(ht, pkey, obj); + PyObject *obj = (PyObject *)key; return PyObject_Hash(obj); } static int -hashtable_compare_unicode(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) +hashtable_compare_unicode(const void *key1, const void *key2) { - PyObject *key1, *key2; - - _Py_HASHTABLE_READ_KEY(ht, pkey, key1); - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, key2); - - if (key1 != NULL && key2 != NULL) - return (PyUnicode_Compare(key1, key2) == 0); - else - return key1 == key2; + PyObject *obj1 = (PyObject *)key1; + PyObject *obj2 = (PyObject *)key2; + if (obj1 != NULL && obj2 != NULL) { + return (PyUnicode_Compare(obj1, obj2) == 0); + } + else { + return obj1 == obj2; + } } static Py_uhash_t -hashtable_hash_uint(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_uint(const void *key_raw) { - unsigned int key; - _Py_HASHTABLE_READ_KEY(ht, pkey, key); + unsigned int key = (unsigned int)FROM_PTR(key_raw); return (Py_uhash_t)key; } static _Py_hashtable_t * -hashtable_new(size_t key_size, size_t data_size, +hashtable_new(size_t data_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func) { _Py_hashtable_allocator_t hashtable_alloc = {malloc, free}; - return _Py_hashtable_new_full(key_size, data_size, 0, + return _Py_hashtable_new_full(data_size, 0, hash_func, compare_func, &hashtable_alloc); } @@ -263,39 +261,33 @@ raw_free(void *ptr) static Py_uhash_t -hashtable_hash_traceback(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_traceback(const void *key) { - traceback_t *traceback; - - _Py_HASHTABLE_READ_KEY(ht, pkey, traceback); + const 
traceback_t *traceback = (const traceback_t *)key; return traceback->hash; } static int -hashtable_compare_traceback(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) +hashtable_compare_traceback(const void *key1, const void *key2) { - traceback_t *traceback1, *traceback2; - const frame_t *frame1, *frame2; - int i; + const traceback_t *traceback1 = (const traceback_t *)key1; + const traceback_t *traceback2 = (const traceback_t *)key2; - _Py_HASHTABLE_READ_KEY(ht, pkey, traceback1); - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, traceback2); - - if (traceback1->nframe != traceback2->nframe) + if (traceback1->nframe != traceback2->nframe) { return 0; - - if (traceback1->total_nframe != traceback2->total_nframe) + } + if (traceback1->total_nframe != traceback2->total_nframe) { return 0; + } - for (i=0; i < traceback1->nframe; i++) { - frame1 = &traceback1->frames[i]; - frame2 = &traceback2->frames[i]; + for (int i=0; i < traceback1->nframe; i++) { + const frame_t *frame1 = &traceback1->frames[i]; + const frame_t *frame2 = &traceback2->frames[i]; - if (frame1->lineno != frame2->lineno) + if (frame1->lineno != frame2->lineno) { return 0; - + } if (frame1->filename != frame2->filename) { assert(PyUnicode_Compare(frame1->filename, frame2->filename) != 0); return 0; @@ -349,7 +341,7 @@ tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame) _Py_hashtable_entry_t *entry; entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_filenames, filename); if (entry != NULL) { - _Py_HASHTABLE_ENTRY_READ_KEY(tracemalloc_filenames, entry, filename); + filename = (PyObject *)entry->key; } else { /* tracemalloc_filenames is responsible to keep a reference @@ -444,7 +436,7 @@ traceback_new(void) /* intern the traceback */ entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_tracebacks, traceback); if (entry != NULL) { - _Py_HASHTABLE_ENTRY_READ_KEY(tracemalloc_tracebacks, entry, traceback); + traceback = (traceback_t *)entry->key; } else { traceback_t *copy; @@ -477,8 +469,7 @@ 
traceback_new(void) static _Py_hashtable_t* tracemalloc_create_traces_table(void) { - return hashtable_new(sizeof(uintptr_t), - sizeof(trace_t), + return hashtable_new(sizeof(trace_t), _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct); } @@ -487,8 +478,7 @@ tracemalloc_create_traces_table(void) static _Py_hashtable_t* tracemalloc_create_domains_table(void) { - return hashtable_new(sizeof(unsigned int), - sizeof(_Py_hashtable_t *), + return hashtable_new(sizeof(_Py_hashtable_t *), hashtable_hash_uint, _Py_hashtable_compare_direct); } @@ -522,7 +512,7 @@ tracemalloc_get_traces_table(unsigned int domain) } else { _Py_hashtable_t *traces = NULL; - (void)_Py_HASHTABLE_GET(tracemalloc_domains, domain, traces); + (void)_Py_HASHTABLE_GET(tracemalloc_domains, TO_PTR(domain), traces); return traces; } } @@ -539,7 +529,7 @@ tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) } trace_t trace; - if (!_Py_HASHTABLE_POP(traces, ptr, trace)) { + if (!_Py_HASHTABLE_POP(traces, TO_PTR(ptr), trace)) { return; } assert(tracemalloc_traced_memory >= trace.size); @@ -568,7 +558,7 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, return -1; } - if (_Py_HASHTABLE_SET(tracemalloc_domains, domain, traces) < 0) { + if (_Py_HASHTABLE_SET(tracemalloc_domains, TO_PTR(domain), traces) < 0) { _Py_hashtable_destroy(traces); return -1; } @@ -590,7 +580,7 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, trace.size = size; trace.traceback = traceback; - int res = _Py_HASHTABLE_SET(traces, ptr, trace); + int res = _Py_HASHTABLE_SET(traces, TO_PTR(ptr), trace); if (res != 0) { return res; } @@ -859,9 +849,7 @@ static int tracemalloc_clear_filename(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, void *user_data) { - PyObject *filename; - - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, filename); + PyObject *filename = (PyObject *)entry->key; Py_DECREF(filename); return 0; } @@ -871,9 +859,7 @@ static int traceback_free_traceback(_Py_hashtable_t *ht, _Py_hashtable_entry_t 
*entry, void *user_data) { - traceback_t *traceback; - - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, traceback); + traceback_t *traceback = (traceback_t *)entry->key; raw_free(traceback); return 0; } @@ -936,11 +922,11 @@ tracemalloc_init(void) } #endif - tracemalloc_filenames = hashtable_new(sizeof(PyObject *), 0, + tracemalloc_filenames = hashtable_new(0, hashtable_hash_pyobject, hashtable_compare_unicode); - tracemalloc_tracebacks = hashtable_new(sizeof(traceback_t *), 0, + tracemalloc_tracebacks = hashtable_new(0, hashtable_hash_traceback, hashtable_compare_traceback); @@ -1154,7 +1140,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) PyObject *frames, *frame; if (intern_table != NULL) { - if (_Py_HASHTABLE_GET(intern_table, traceback, frames)) { + if (_Py_HASHTABLE_GET(intern_table, (const void *)traceback, frames)) { Py_INCREF(frames); return frames; } @@ -1244,13 +1230,12 @@ tracemalloc_get_traces_copy_domain(_Py_hashtable_t *domains, { get_traces_t *get_traces = user_data; - unsigned int domain; - _Py_HASHTABLE_ENTRY_READ_KEY(domains, entry, domain); + unsigned int domain = (unsigned int)FROM_PTR(entry->key); _Py_hashtable_t *traces; _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); _Py_hashtable_t *traces2 = _Py_hashtable_copy(traces); - if (_Py_HASHTABLE_SET(get_traces->domains, domain, traces2) < 0) { + if (_Py_HASHTABLE_SET(get_traces->domains, TO_PTR(domain), traces2) < 0) { _Py_hashtable_destroy(traces2); return -1; } @@ -1289,8 +1274,7 @@ tracemalloc_get_traces_domain(_Py_hashtable_t *domains, { get_traces_t *get_traces = user_data; - unsigned int domain; - _Py_HASHTABLE_ENTRY_READ_KEY(domains, entry, domain); + unsigned int domain = (unsigned int)FROM_PTR(entry->key); _Py_hashtable_t *traces; _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); @@ -1343,8 +1327,7 @@ _tracemalloc__get_traces_impl(PyObject *module) /* the traceback hash table is used temporarily to intern traceback tuple of (filename, lineno) tuples 
*/ - get_traces.tracebacks = hashtable_new(sizeof(traceback_t *), - sizeof(PyObject *), + get_traces.tracebacks = hashtable_new(sizeof(PyObject *), _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct); if (get_traces.tracebacks == NULL) { @@ -1425,7 +1408,7 @@ tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) TABLES_LOCK(); _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); if (traces) { - found = _Py_HASHTABLE_GET(traces, ptr, trace); + found = _Py_HASHTABLE_GET(traces, TO_PTR(ptr), trace); } else { found = 0; diff --git a/Python/hashtable.c b/Python/hashtable.c index 90fe34e6280161..01d84398cc79fc 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -59,7 +59,7 @@ #define ENTRY_NEXT(ENTRY) \ ((_Py_hashtable_entry_t *)_Py_SLIST_ITEM_NEXT(ENTRY)) #define HASHTABLE_ITEM_SIZE(HT) \ - (sizeof(_Py_hashtable_entry_t) + (HT)->key_size + (HT)->data_size) + (sizeof(_Py_hashtable_entry_t) + (HT)->data_size) #define ENTRY_READ_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ do { \ @@ -105,20 +105,16 @@ _Py_slist_remove(_Py_slist_t *list, _Py_slist_item_t *previous, Py_uhash_t -_Py_hashtable_hash_ptr(struct _Py_hashtable_t *ht, const void *pkey) +_Py_hashtable_hash_ptr(const void *key) { - void *key; - _Py_HASHTABLE_READ_KEY(ht, pkey, key); return (Py_uhash_t)_Py_HashPointerRaw(key); } int -_Py_hashtable_compare_direct(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) +_Py_hashtable_compare_direct(const void *key1, const void *key2) { - const void *pkey2 = _Py_HASHTABLE_ENTRY_PKEY(entry); - return (memcmp(pkey, pkey2, ht->key_size) == 0); + return (key1 == key2); } @@ -195,16 +191,16 @@ _Py_hashtable_print_stats(_Py_hashtable_t *ht) _Py_hashtable_entry_t * -_Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *pkey) +_Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) { - Py_uhash_t key_hash = ht->hash_func(ht, pkey); + Py_uhash_t key_hash = ht->hash_func(key); size_t index = key_hash & 
(ht->num_buckets - 1); _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); while (1) { if (entry == NULL) { return NULL; } - if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) { + if (entry->key_hash == key_hash && ht->compare_func(key, entry->key)) { break; } entry = ENTRY_NEXT(entry); @@ -214,28 +210,27 @@ _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *pkey) static int -_Py_hashtable_pop_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey, +_Py_hashtable_pop_entry(_Py_hashtable_t *ht, const void *key, void *data, size_t data_size) { - Py_uhash_t key_hash; - size_t index; - _Py_hashtable_entry_t *entry, *previous; - assert(key_size == ht->key_size); - - key_hash = ht->hash_func(ht, pkey); - index = key_hash & (ht->num_buckets - 1); + Py_uhash_t key_hash = ht->hash_func(key); + size_t index = key_hash & (ht->num_buckets - 1); - previous = NULL; - for (entry = TABLE_HEAD(ht, index); entry != NULL; entry = ENTRY_NEXT(entry)) { - if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, index); + _Py_hashtable_entry_t *previous = NULL; + while (1) { + if (entry == NULL) { + // not found + return 0; + } + if (entry->key_hash == key_hash && ht->compare_func(key, entry->key)) { break; + } previous = entry; + entry = ENTRY_NEXT(entry); } - if (entry == NULL) - return 0; - _Py_slist_remove(&ht->buckets[index], (_Py_slist_item_t *)previous, (_Py_slist_item_t *)entry); ht->entries--; @@ -251,26 +246,22 @@ _Py_hashtable_pop_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey, int -_Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, +_Py_hashtable_set(_Py_hashtable_t *ht, const void *key, size_t data_size, const void *data) { - Py_uhash_t key_hash; - size_t index; _Py_hashtable_entry_t *entry; - assert(key_size == ht->key_size); - assert(data != NULL || data_size == 0); #ifndef NDEBUG /* Don't write the assertion on a single line 
because it is interesting to know the duplicated entry if the assertion failed. The entry can be read using a debugger. */ - entry = ht->get_entry_func(ht, pkey); + entry = ht->get_entry_func(ht, key); assert(entry == NULL); #endif - key_hash = ht->hash_func(ht, pkey); - index = key_hash & (ht->num_buckets - 1); + Py_uhash_t key_hash = ht->hash_func(key); + size_t index = key_hash & (ht->num_buckets - 1); entry = ht->alloc.malloc(HASHTABLE_ITEM_SIZE(ht)); if (entry == NULL) { @@ -279,9 +270,10 @@ _Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, } entry->key_hash = key_hash; - memcpy((void *)_Py_HASHTABLE_ENTRY_PKEY(entry), pkey, ht->key_size); - if (data) + entry->key = (void *)key; + if (data) { ENTRY_WRITE_PDATA(ht, entry, data_size, data); + } _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); ht->entries++; @@ -293,10 +285,10 @@ _Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, int -_Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *pkey, void *data) +_Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *key, void *data) { assert(data != NULL); - _Py_hashtable_entry_t *entry = ht->get_entry_func(ht, pkey); + _Py_hashtable_entry_t *entry = ht->get_entry_func(ht, key); if (entry != NULL) { ENTRY_READ_PDATA(ht, entry, ht->data_size, data); return 1; @@ -308,13 +300,12 @@ _Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *pkey, void *data) // Specialized for: -// key_size == sizeof(void*) // hash_func == _Py_hashtable_hash_ptr // compare_func == _Py_hashtable_compare_direct _Py_hashtable_entry_t * -_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *pkey) +_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) { - Py_uhash_t key_hash = _Py_hashtable_hash_ptr(ht, pkey); + Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); size_t index = key_hash & (ht->num_buckets - 1); _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); while (1) { @@ -322,8 +313,7 @@ 
_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *pkey) return NULL; } if (entry->key_hash == key_hash) { - const void *pkey2 = _Py_HASHTABLE_ENTRY_PKEY(entry); - if (memcmp(pkey, pkey2, sizeof(void*)) == 0) { + if (entry->key == key) { break; } } @@ -334,14 +324,13 @@ _Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *pkey) // Specialized for: -// key_size == sizeof(void*) // hash_func == _Py_hashtable_hash_ptr // compare_func == _Py_hashtable_compare_direct int -_Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *pkey, void *data) +_Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *key, void *data) { assert(data != NULL); - _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry_ptr(ht, pkey); + _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry_ptr(ht, key); if (entry != NULL) { ENTRY_READ_PDATA(ht, entry, ht->data_size, data); return 1; @@ -353,24 +342,24 @@ _Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *pkey, void *data) int -_Py_hashtable_pop(_Py_hashtable_t *ht, size_t key_size, const void *pkey, +_Py_hashtable_pop(_Py_hashtable_t *ht, const void *key, size_t data_size, void *data) { assert(data != NULL); - return _Py_hashtable_pop_entry(ht, key_size, pkey, data, data_size); + return _Py_hashtable_pop_entry(ht, key, data, data_size); } /* Code commented since the function is not needed in Python */ #if 0 void -_Py_hashtable_delete(_Py_hashtable_t *ht, size_t key_size, const void *pkey) +_Py_hashtable_delete(_Py_hashtable_t *ht, size_t const void *key) { #ifndef NDEBUG - int found = _Py_hashtable_pop_entry(ht, key_size, pkey, NULL, 0); + int found = _Py_hashtable_pop_entry(ht, key, NULL, 0); assert(found); #else - (void)_Py_hashtable_pop_entry(ht, key_size, pkey, NULL, 0); + (void)_Py_hashtable_pop_entry(ht, key, NULL, 0); #endif } #endif @@ -427,7 +416,7 @@ hashtable_rehash(_Py_hashtable_t *ht) size_t entry_index; - assert(ht->hash_func(ht, _Py_HASHTABLE_ENTRY_PKEY(entry)) == entry->key_hash); + 
assert(ht->hash_func(entry->key) == entry->key_hash); next = ENTRY_NEXT(entry); entry_index = entry->key_hash & (new_size - 1); @@ -440,8 +429,7 @@ hashtable_rehash(_Py_hashtable_t *ht) _Py_hashtable_t * -_Py_hashtable_new_full(size_t key_size, size_t data_size, - size_t init_size, +_Py_hashtable_new_full(size_t data_size, size_t init_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, _Py_hashtable_allocator_t *allocator) @@ -464,7 +452,6 @@ _Py_hashtable_new_full(size_t key_size, size_t data_size, ht->num_buckets = round_size(init_size); ht->entries = 0; - ht->key_size = key_size; ht->data_size = data_size; buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); @@ -480,8 +467,7 @@ _Py_hashtable_new_full(size_t key_size, size_t data_size, ht->hash_func = hash_func; ht->compare_func = compare_func; ht->alloc = alloc; - if (ht->key_size == sizeof(void*) - && ht->hash_func == _Py_hashtable_hash_ptr + if (ht->hash_func == _Py_hashtable_hash_ptr && ht->compare_func == _Py_hashtable_compare_direct) { ht->get_func = _Py_hashtable_get_ptr; @@ -492,12 +478,11 @@ _Py_hashtable_new_full(size_t key_size, size_t data_size, _Py_hashtable_t * -_Py_hashtable_new(size_t key_size, size_t data_size, +_Py_hashtable_new(size_t data_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func) { - return _Py_hashtable_new_full(key_size, data_size, - HASHTABLE_MIN_SIZE, + return _Py_hashtable_new_full(data_size, HASHTABLE_MIN_SIZE, hash_func, compare_func, NULL); } @@ -543,15 +528,13 @@ _Py_hashtable_destroy(_Py_hashtable_t *ht) _Py_hashtable_t * _Py_hashtable_copy(_Py_hashtable_t *src) { - const size_t key_size = src->key_size; const size_t data_size = src->data_size; _Py_hashtable_t *dst; _Py_hashtable_entry_t *entry; size_t bucket; int err; - dst = _Py_hashtable_new_full(key_size, data_size, - src->num_buckets, + dst = _Py_hashtable_new_full(data_size, src->num_buckets, src->hash_func, src->compare_func, &src->alloc); @@ -561,9 
+544,9 @@ _Py_hashtable_copy(_Py_hashtable_t *src) for (bucket=0; bucket < src->num_buckets; bucket++) { entry = TABLE_HEAD(src, bucket); for (; entry; entry = ENTRY_NEXT(entry)) { - const void *pkey = _Py_HASHTABLE_ENTRY_PKEY(entry); + const void *key = entry->key; const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(src, entry); - err = _Py_hashtable_set(dst, key_size, pkey, data_size, pdata); + err = _Py_hashtable_set(dst, key, data_size, pdata); if (err) { _Py_hashtable_destroy(dst); return NULL; diff --git a/Python/marshal.c b/Python/marshal.c index d2bff524f30dde..1e901ae7c3133e 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -549,7 +549,7 @@ static int w_init_refs(WFILE *wf, int version) { if (version >= 3) { - wf->hashtable = _Py_hashtable_new(sizeof(PyObject *), sizeof(int), + wf->hashtable = _Py_hashtable_new(sizeof(int), _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct); if (wf->hashtable == NULL) { @@ -564,9 +564,7 @@ static int w_decref_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, void *Py_UNUSED(data)) { - PyObject *entry_key; - - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, entry_key); + PyObject *entry_key = (PyObject *)entry->key; Py_XDECREF(entry_key); return 0; } From 2d0a3d682f699cce8db6e30981d41d9125318726 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 13 May 2020 02:50:18 +0200 Subject: [PATCH 080/115] bpo-40609: Add destroy functions to _Py_hashtable (GH-20062) Add key_destroy_func and value_destroy_func parameters to _Py_hashtable_new_full(). marshal.c and _tracemalloc.c use these destroy functions. 
--- Include/internal/pycore_hashtable.h | 13 ++++++-- Modules/_tracemalloc.c | 51 ++++++++++++++--------------- Python/hashtable.c | 37 +++++++++++++++------ Python/marshal.c | 24 +++++++------- 4 files changed, 73 insertions(+), 52 deletions(-) diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 965a4e7f2b4587..3c7483a058f71d 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -34,21 +34,21 @@ typedef struct { /* data (data_size bytes) follows */ } _Py_hashtable_entry_t; -#define _Py_HASHTABLE_ENTRY_PDATA(TABLE, ENTRY) \ +#define _Py_HASHTABLE_ENTRY_PDATA(ENTRY) \ ((const void *)((char *)(ENTRY) \ + sizeof(_Py_hashtable_entry_t))) #define _Py_HASHTABLE_ENTRY_READ_DATA(TABLE, ENTRY, DATA) \ do { \ assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy(&(DATA), _Py_HASHTABLE_ENTRY_PDATA(TABLE, (ENTRY)), \ + memcpy(&(DATA), _Py_HASHTABLE_ENTRY_PDATA((ENTRY)), \ sizeof(DATA)); \ } while (0) #define _Py_HASHTABLE_ENTRY_WRITE_DATA(TABLE, ENTRY, DATA) \ do { \ assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA((TABLE), (ENTRY)), \ + memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ &(DATA), sizeof(DATA)); \ } while (0) @@ -61,6 +61,9 @@ typedef struct _Py_hashtable_t _Py_hashtable_t; typedef Py_uhash_t (*_Py_hashtable_hash_func) (const void *key); typedef int (*_Py_hashtable_compare_func) (const void *key1, const void *key2); +typedef void (*_Py_hashtable_destroy_func) (void *key); +typedef void (*_Py_hashtable_value_destroy_func) (_Py_hashtable_t *ht, + _Py_hashtable_entry_t *entry); typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht, const void *key); typedef int (*_Py_hashtable_get_func) (_Py_hashtable_t *ht, @@ -86,6 +89,8 @@ struct _Py_hashtable_t { _Py_hashtable_get_entry_func get_entry_func; _Py_hashtable_hash_func hash_func; _Py_hashtable_compare_func compare_func; + _Py_hashtable_destroy_func key_destroy_func; 
+ _Py_hashtable_value_destroy_func value_destroy_func; _Py_hashtable_allocator_t alloc; }; @@ -107,6 +112,8 @@ PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full( size_t init_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, + _Py_hashtable_destroy_func key_destroy_func, + _Py_hashtable_value_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator); PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 050fe03bba8eca..618bf476d99ade 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -238,12 +238,13 @@ hashtable_hash_uint(const void *key_raw) static _Py_hashtable_t * hashtable_new(size_t data_size, _Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func) + _Py_hashtable_compare_func compare_func, + _Py_hashtable_value_destroy_func value_destroy_fun) { _Py_hashtable_allocator_t hashtable_alloc = {malloc, free}; return _Py_hashtable_new_full(data_size, 0, hash_func, compare_func, - &hashtable_alloc); + NULL, value_destroy_fun, &hashtable_alloc); } @@ -471,35 +472,34 @@ tracemalloc_create_traces_table(void) { return hashtable_new(sizeof(trace_t), _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); + _Py_hashtable_compare_direct, + NULL); } -static _Py_hashtable_t* -tracemalloc_create_domains_table(void) +static void +tracemalloc_destroy_domain_table(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry) { - return hashtable_new(sizeof(_Py_hashtable_t *), - hashtable_hash_uint, - _Py_hashtable_compare_direct); + _Py_hashtable_t *traces; + _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + _Py_hashtable_destroy(traces); } -static int -tracemalloc_destroy_domains_cb(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, - void *user_data) +static _Py_hashtable_t* +tracemalloc_create_domains_table(void) { - _Py_hashtable_t *traces; - _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); - 
_Py_hashtable_destroy(traces); - return 0; + return hashtable_new(sizeof(_Py_hashtable_t *), + hashtable_hash_uint, + _Py_hashtable_compare_direct, + tracemalloc_destroy_domain_table); } static void tracemalloc_destroy_domains(_Py_hashtable_t *domains) { - _Py_hashtable_foreach(domains, tracemalloc_destroy_domains_cb, NULL); _Py_hashtable_destroy(domains); } @@ -924,11 +924,13 @@ tracemalloc_init(void) tracemalloc_filenames = hashtable_new(0, hashtable_hash_pyobject, - hashtable_compare_unicode); + hashtable_compare_unicode, + NULL); tracemalloc_tracebacks = hashtable_new(0, hashtable_hash_traceback, - hashtable_compare_traceback); + hashtable_compare_traceback, + NULL); tracemalloc_traces = tracemalloc_create_traces_table(); tracemalloc_domains = tracemalloc_create_domains_table(); @@ -1285,15 +1287,13 @@ tracemalloc_get_traces_domain(_Py_hashtable_t *domains, } -static int +static void tracemalloc_pyobject_decref_cb(_Py_hashtable_t *tracebacks, - _Py_hashtable_entry_t *entry, - void *user_data) + _Py_hashtable_entry_t *entry) { PyObject *obj; _Py_HASHTABLE_ENTRY_READ_DATA(tracebacks, entry, obj); Py_DECREF(obj); - return 0; } @@ -1329,7 +1329,8 @@ _tracemalloc__get_traces_impl(PyObject *module) of (filename, lineno) tuples */ get_traces.tracebacks = hashtable_new(sizeof(PyObject *), _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); + _Py_hashtable_compare_direct, + tracemalloc_pyobject_decref_cb); if (get_traces.tracebacks == NULL) { goto no_memory; } @@ -1381,8 +1382,6 @@ _tracemalloc__get_traces_impl(PyObject *module) finally: if (get_traces.tracebacks != NULL) { - _Py_hashtable_foreach(get_traces.tracebacks, - tracemalloc_pyobject_decref_cb, NULL); _Py_hashtable_destroy(get_traces.tracebacks); } if (get_traces.traces != NULL) { diff --git a/Python/hashtable.c b/Python/hashtable.c index 01d84398cc79fc..0c013bbccf5573 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -64,14 +64,14 @@ #define ENTRY_READ_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ do 
{ \ assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((PDATA), _Py_HASHTABLE_ENTRY_PDATA(TABLE, (ENTRY)), \ + memcpy((PDATA), _Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ (DATA_SIZE)); \ } while (0) #define ENTRY_WRITE_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ do { \ assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA((TABLE), (ENTRY)), \ + memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ (PDATA), (DATA_SIZE)); \ } while (0) @@ -432,6 +432,8 @@ _Py_hashtable_t * _Py_hashtable_new_full(size_t data_size, size_t init_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, + _Py_hashtable_destroy_func key_destroy_func, + _Py_hashtable_value_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator) { _Py_hashtable_t *ht; @@ -466,6 +468,8 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, ht->get_entry_func = _Py_hashtable_get_entry_generic; ht->hash_func = hash_func; ht->compare_func = compare_func; + ht->key_destroy_func = key_destroy_func; + ht->value_destroy_func = value_destroy_func; ht->alloc = alloc; if (ht->hash_func == _Py_hashtable_hash_ptr && ht->compare_func == _Py_hashtable_compare_direct) @@ -484,7 +488,7 @@ _Py_hashtable_new(size_t data_size, { return _Py_hashtable_new_full(data_size, HASHTABLE_MIN_SIZE, hash_func, compare_func, - NULL); + NULL, NULL, NULL); } @@ -506,16 +510,27 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) } +static void +_Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry) +{ + if (ht->key_destroy_func) { + ht->key_destroy_func(entry->key); + } + if (ht->value_destroy_func) { + ht->value_destroy_func(ht, entry); + } + ht->alloc.free(entry); +} + + void _Py_hashtable_destroy(_Py_hashtable_t *ht) { - size_t i; - - for (i = 0; i < ht->num_buckets; i++) { - _Py_slist_item_t *entry = ht->buckets[i].head; + for (size_t i = 0; i < ht->num_buckets; i++) { + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i); while (entry) { - _Py_slist_item_t 
*entry_next = entry->next; - ht->alloc.free(entry); + _Py_hashtable_entry_t *entry_next = ENTRY_NEXT(entry); + _Py_hashtable_destroy_entry(ht, entry); entry = entry_next; } } @@ -537,6 +552,8 @@ _Py_hashtable_copy(_Py_hashtable_t *src) dst = _Py_hashtable_new_full(data_size, src->num_buckets, src->hash_func, src->compare_func, + src->key_destroy_func, + src->value_destroy_func, &src->alloc); if (dst == NULL) return NULL; @@ -545,7 +562,7 @@ _Py_hashtable_copy(_Py_hashtable_t *src) entry = TABLE_HEAD(src, bucket); for (; entry; entry = ENTRY_NEXT(entry)) { const void *key = entry->key; - const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(src, entry); + const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(entry); err = _Py_hashtable_set(dst, key, data_size, pdata); if (err) { _Py_hashtable_destroy(dst); diff --git a/Python/marshal.c b/Python/marshal.c index 1e901ae7c3133e..7c99c1ee13c0ee 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -545,13 +545,21 @@ w_complex_object(PyObject *v, char flag, WFILE *p) } } +static void +w_decref_entry(void *key) +{ + PyObject *entry_key = (PyObject *)key; + Py_XDECREF(entry_key); +} + static int w_init_refs(WFILE *wf, int version) { if (version >= 3) { - wf->hashtable = _Py_hashtable_new(sizeof(int), - _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); + wf->hashtable = _Py_hashtable_new_full(sizeof(int), 0, + _Py_hashtable_hash_ptr, + _Py_hashtable_compare_direct, + w_decref_entry, NULL, NULL); if (wf->hashtable == NULL) { PyErr_NoMemory(); return -1; @@ -560,20 +568,10 @@ w_init_refs(WFILE *wf, int version) return 0; } -static int -w_decref_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, - void *Py_UNUSED(data)) -{ - PyObject *entry_key = (PyObject *)entry->key; - Py_XDECREF(entry_key); - return 0; -} - static void w_clear_refs(WFILE *wf) { if (wf->hashtable != NULL) { - _Py_hashtable_foreach(wf->hashtable, w_decref_entry, NULL); _Py_hashtable_destroy(wf->hashtable); } } From d95bd4214c2babe851b02562d973d60c02e639b7 Mon 
Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 13 May 2020 03:52:11 +0200 Subject: [PATCH 081/115] bpo-40609: _tracemalloc allocates traces (GH-20064) Rewrite _tracemalloc to store "trace_t*" rather than directly "trace_t" in traces hash tables. Traces are now allocated on the heap memory, outside the hash table. Add tracemalloc_copy_traces() and tracemalloc_copy_domains() helper functions. Remove _Py_hashtable_copy() function since there is no API to copy a key or a value. Remove also _Py_hashtable_delete() function which was commented. --- Include/internal/pycore_hashtable.h | 10 -- Modules/_tracemalloc.c | 160 ++++++++++++++++++++-------- Python/hashtable.c | 49 --------- 3 files changed, 117 insertions(+), 102 deletions(-) diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 3c7483a058f71d..0da2ffdb389e51 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -45,13 +45,6 @@ typedef struct { sizeof(DATA)); \ } while (0) -#define _Py_HASHTABLE_ENTRY_WRITE_DATA(TABLE, ENTRY, DATA) \ - do { \ - assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ - &(DATA), sizeof(DATA)); \ - } while (0) - /* _Py_hashtable: prototypes */ @@ -118,9 +111,6 @@ PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full( PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); -/* Return a copy of the hash table */ -PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_copy(_Py_hashtable_t *src); - PyAPI_FUNC(void) _Py_hashtable_clear(_Py_hashtable_t *ht); typedef int (*_Py_hashtable_foreach_func) (_Py_hashtable_t *ht, diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 618bf476d99ade..a42349a8e47192 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -122,7 +122,7 @@ static traceback_t *tracemalloc_traceback = NULL; Protected by the GIL */ static _Py_hashtable_t *tracemalloc_tracebacks = NULL; -/* pointer (void*) => trace (trace_t). 
+/* pointer (void*) => trace (trace_t*). Protected by TABLES_LOCK(). */ static _Py_hashtable_t *tracemalloc_traces = NULL; @@ -467,13 +467,23 @@ traceback_new(void) } +static void +tracemalloc_destroy_trace_cb(_Py_hashtable_t *traces, + _Py_hashtable_entry_t *entry) +{ + trace_t *trace; + _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); + raw_free(trace); +} + + static _Py_hashtable_t* tracemalloc_create_traces_table(void) { - return hashtable_new(sizeof(trace_t), + return hashtable_new(sizeof(trace_t*), _Py_hashtable_hash_ptr, _Py_hashtable_compare_direct, - NULL); + tracemalloc_destroy_trace_cb); } @@ -528,12 +538,13 @@ tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) return; } - trace_t trace; + trace_t *trace; if (!_Py_HASHTABLE_POP(traces, TO_PTR(ptr), trace)) { return; } - assert(tracemalloc_traced_memory >= trace.size); - tracemalloc_traced_memory -= trace.size; + assert(tracemalloc_traced_memory >= trace->size); + tracemalloc_traced_memory -= trace->size; + raw_free(trace); } #define REMOVE_TRACE(ptr) \ @@ -565,23 +576,27 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, } _Py_hashtable_entry_t* entry = _Py_HASHTABLE_GET_ENTRY(traces, ptr); - trace_t trace; if (entry != NULL) { /* the memory block is already tracked */ + trace_t *trace; _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); - assert(tracemalloc_traced_memory >= trace.size); - tracemalloc_traced_memory -= trace.size; + assert(tracemalloc_traced_memory >= trace->size); + tracemalloc_traced_memory -= trace->size; - trace.size = size; - trace.traceback = traceback; - _Py_HASHTABLE_ENTRY_WRITE_DATA(traces, entry, trace); + trace->size = size; + trace->traceback = traceback; } else { - trace.size = size; - trace.traceback = traceback; + trace_t *trace = raw_malloc(sizeof(trace_t)); + if (trace == NULL) { + return -1; + } + trace->size = size; + trace->traceback = traceback; int res = _Py_HASHTABLE_SET(traces, TO_PTR(ptr), trace); if (res != 0) { + raw_free(trace); return 
res; } } @@ -1225,19 +1240,62 @@ typedef struct { unsigned int domain; } get_traces_t; + static int -tracemalloc_get_traces_copy_domain(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, - void *user_data) +tracemalloc_copy_trace(_Py_hashtable_t *traces, + _Py_hashtable_entry_t *entry, + void *traces2_raw) { - get_traces_t *get_traces = user_data; + _Py_hashtable_t *traces2 = (_Py_hashtable_t *)traces2_raw; + + trace_t *trace; + _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); + + trace_t *trace2 = raw_malloc(sizeof(trace_t)); + if (traces2 == NULL) { + return -1; + } + *trace2 = *trace; + if (_Py_HASHTABLE_SET(traces2, entry->key, trace2) < 0) { + raw_free(trace2); + return -1; + } + return 0; +} + + +static _Py_hashtable_t* +tracemalloc_copy_traces(_Py_hashtable_t *traces) +{ + _Py_hashtable_t *traces2 = tracemalloc_create_traces_table(); + if (traces2 == NULL) { + return NULL; + } + + int err = _Py_hashtable_foreach(traces, + tracemalloc_copy_trace, + traces2); + if (err) { + _Py_hashtable_destroy(traces2); + return NULL; + } + return traces2; +} + + +static int +tracemalloc_copy_domain(_Py_hashtable_t *domains, + _Py_hashtable_entry_t *entry, + void *domains2_raw) +{ + _Py_hashtable_t *domains2 = (_Py_hashtable_t *)domains2_raw; unsigned int domain = (unsigned int)FROM_PTR(entry->key); _Py_hashtable_t *traces; _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); - _Py_hashtable_t *traces2 = _Py_hashtable_copy(traces); - if (_Py_HASHTABLE_SET(get_traces->domains, TO_PTR(domain), traces2) < 0) { + _Py_hashtable_t *traces2 = tracemalloc_copy_traces(traces); + if (_Py_HASHTABLE_SET(domains2, TO_PTR(domain), traces2) < 0) { _Py_hashtable_destroy(traces2); return -1; } @@ -1245,18 +1303,37 @@ tracemalloc_get_traces_copy_domain(_Py_hashtable_t *domains, } +static _Py_hashtable_t* +tracemalloc_copy_domains(_Py_hashtable_t *domains) +{ + _Py_hashtable_t *domains2 = tracemalloc_create_domains_table(); + if (domains2 == NULL) { + return NULL; + } + + int err 
= _Py_hashtable_foreach(domains, + tracemalloc_copy_domain, + domains2); + if (err) { + _Py_hashtable_destroy(domains2); + return NULL; + } + return domains2; +} + + static int tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entry, void *user_data) { get_traces_t *get_traces = user_data; - trace_t trace; + trace_t *trace; PyObject *tracemalloc_obj; int res; _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); - tracemalloc_obj = trace_to_pyobject(get_traces->domain, &trace, get_traces->tracebacks); + tracemalloc_obj = trace_to_pyobject(get_traces->domain, trace, get_traces->tracebacks); if (tracemalloc_obj == NULL) return 1; @@ -1335,37 +1412,34 @@ _tracemalloc__get_traces_impl(PyObject *module) goto no_memory; } - get_traces.domains = tracemalloc_create_domains_table(); - if (get_traces.domains == NULL) { - goto no_memory; - } - - int err; - // Copy all traces so tracemalloc_get_traces_fill() doesn't have to disable // temporarily tracemalloc which would impact other threads and so would // miss allocations while get_traces() is called. 
TABLES_LOCK(); - get_traces.traces = _Py_hashtable_copy(tracemalloc_traces); - err = _Py_hashtable_foreach(tracemalloc_domains, - tracemalloc_get_traces_copy_domain, - &get_traces); + get_traces.traces = tracemalloc_copy_traces(tracemalloc_traces); TABLES_UNLOCK(); if (get_traces.traces == NULL) { goto no_memory; } - if (err) { + + TABLES_LOCK(); + get_traces.domains = tracemalloc_copy_domains(tracemalloc_domains); + TABLES_UNLOCK(); + + if (get_traces.domains == NULL) { goto no_memory; } // Convert traces to a list of tuples set_reentrant(1); - err = _Py_hashtable_foreach(get_traces.traces, - tracemalloc_get_traces_fill, &get_traces); + int err = _Py_hashtable_foreach(get_traces.traces, + tracemalloc_get_traces_fill, + &get_traces); if (!err) { err = _Py_hashtable_foreach(get_traces.domains, - tracemalloc_get_traces_domain, &get_traces); + tracemalloc_get_traces_domain, + &get_traces); } set_reentrant(0); if (err) { @@ -1398,7 +1472,7 @@ _tracemalloc__get_traces_impl(PyObject *module) static traceback_t* tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) { - trace_t trace; + trace_t *trace; int found; if (!_Py_tracemalloc_config.tracing) @@ -1414,10 +1488,11 @@ tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) } TABLES_UNLOCK(); - if (!found) + if (!found) { return NULL; + } - return trace.traceback; + return trace->traceback; } @@ -1758,10 +1833,9 @@ _PyTraceMalloc_NewReference(PyObject *op) /* update the traceback of the memory block */ traceback_t *traceback = traceback_new(); if (traceback != NULL) { - trace_t trace; + trace_t *trace; _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace); - trace.traceback = traceback; - _Py_HASHTABLE_ENTRY_WRITE_DATA(tracemalloc_traces, entry, trace); + trace->traceback = traceback; res = 0; } } diff --git a/Python/hashtable.c b/Python/hashtable.c index 0c013bbccf5573..e7681fb1565198 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -350,21 +350,6 @@ _Py_hashtable_pop(_Py_hashtable_t 
*ht, const void *key, } -/* Code commented since the function is not needed in Python */ -#if 0 -void -_Py_hashtable_delete(_Py_hashtable_t *ht, size_t const void *key) -{ -#ifndef NDEBUG - int found = _Py_hashtable_pop_entry(ht, key, NULL, 0); - assert(found); -#else - (void)_Py_hashtable_pop_entry(ht, key, NULL, 0); -#endif -} -#endif - - int _Py_hashtable_foreach(_Py_hashtable_t *ht, _Py_hashtable_foreach_func func, @@ -538,37 +523,3 @@ _Py_hashtable_destroy(_Py_hashtable_t *ht) ht->alloc.free(ht->buckets); ht->alloc.free(ht); } - - -_Py_hashtable_t * -_Py_hashtable_copy(_Py_hashtable_t *src) -{ - const size_t data_size = src->data_size; - _Py_hashtable_t *dst; - _Py_hashtable_entry_t *entry; - size_t bucket; - int err; - - dst = _Py_hashtable_new_full(data_size, src->num_buckets, - src->hash_func, - src->compare_func, - src->key_destroy_func, - src->value_destroy_func, - &src->alloc); - if (dst == NULL) - return NULL; - - for (bucket=0; bucket < src->num_buckets; bucket++) { - entry = TABLE_HEAD(src, bucket); - for (; entry; entry = ENTRY_NEXT(entry)) { - const void *key = entry->key; - const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(entry); - err = _Py_hashtable_set(dst, key, data_size, pdata); - if (err) { - _Py_hashtable_destroy(dst); - return NULL; - } - } - } - return dst; -} From 5b0a30354d8a8bb39a05ce10ca4f5c78b729f25b Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 13 May 2020 04:40:30 +0200 Subject: [PATCH 082/115] bpo-40609: _Py_hashtable_t values become void* (GH-20065) _Py_hashtable_t values become regular "void *" pointers. * Add _Py_hashtable_entry_t.data member * Remove _Py_hashtable_t.data_size member * Remove _Py_hashtable_t.get_func member. It is no longer needed to specialize _Py_hashtable_get() for a specific value size, since all entries now have the same size (void*). 
* Remove the following macros: * _Py_HASHTABLE_GET() * _Py_HASHTABLE_SET() * _Py_HASHTABLE_SET_NODATA() * _Py_HASHTABLE_POP() * Rename _Py_hashtable_pop() to _Py_hashtable_steal() * _Py_hashtable_foreach() callback now gets key and value rather than entry. * Remove _Py_hashtable_value_destroy_func type. value_destroy_func callback now only has a single parameter: data (void*). --- Include/internal/pycore_hashtable.h | 88 +++--------- Modules/_tracemalloc.c | 215 ++++++++++------------------ Python/hashtable.c | 131 +++++------------ Python/marshal.c | 9 +- 4 files changed, 140 insertions(+), 303 deletions(-) diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 0da2ffdb389e51..2990f9e0c1cc6f 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -31,20 +31,9 @@ typedef struct { Py_uhash_t key_hash; void *key; - /* data (data_size bytes) follows */ + void *value; } _Py_hashtable_entry_t; -#define _Py_HASHTABLE_ENTRY_PDATA(ENTRY) \ - ((const void *)((char *)(ENTRY) \ - + sizeof(_Py_hashtable_entry_t))) - -#define _Py_HASHTABLE_ENTRY_READ_DATA(TABLE, ENTRY, DATA) \ - do { \ - assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy(&(DATA), _Py_HASHTABLE_ENTRY_PDATA((ENTRY)), \ - sizeof(DATA)); \ - } while (0) - /* _Py_hashtable: prototypes */ @@ -55,12 +44,8 @@ typedef struct _Py_hashtable_t _Py_hashtable_t; typedef Py_uhash_t (*_Py_hashtable_hash_func) (const void *key); typedef int (*_Py_hashtable_compare_func) (const void *key1, const void *key2); typedef void (*_Py_hashtable_destroy_func) (void *key); -typedef void (*_Py_hashtable_value_destroy_func) (_Py_hashtable_t *ht, - _Py_hashtable_entry_t *entry); typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht, const void *key); -typedef int (*_Py_hashtable_get_func) (_Py_hashtable_t *ht, - const void *key, void *data); typedef struct { /* allocate a memory block */ @@ -76,14 +61,12 @@ struct _Py_hashtable_t { 
size_t num_buckets; size_t entries; /* Total number of entries in the table. */ _Py_slist_t *buckets; - size_t data_size; - _Py_hashtable_get_func get_func; _Py_hashtable_get_entry_func get_entry_func; _Py_hashtable_hash_func hash_func; _Py_hashtable_compare_func compare_func; _Py_hashtable_destroy_func key_destroy_func; - _Py_hashtable_value_destroy_func value_destroy_func; + _Py_hashtable_destroy_func value_destroy_func; _Py_hashtable_allocator_t alloc; }; @@ -96,17 +79,14 @@ PyAPI_FUNC(int) _Py_hashtable_compare_direct( const void *key2); PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new( - size_t data_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func); PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full( - size_t data_size, - size_t init_size, _Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, _Py_hashtable_destroy_func key_destroy_func, - _Py_hashtable_value_destroy_func value_destroy_func, + _Py_hashtable_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator); PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); @@ -114,8 +94,8 @@ PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); PyAPI_FUNC(void) _Py_hashtable_clear(_Py_hashtable_t *ht); typedef int (*_Py_hashtable_foreach_func) (_Py_hashtable_t *ht, - _Py_hashtable_entry_t *entry, - void *arg); + const void *key, const void *value, + void *user_data); /* Call func() on each entry of the hashtable. Iteration stops if func() result is non-zero, in this case it's the result @@ -123,68 +103,42 @@ typedef int (*_Py_hashtable_foreach_func) (_Py_hashtable_t *ht, PyAPI_FUNC(int) _Py_hashtable_foreach( _Py_hashtable_t *ht, _Py_hashtable_foreach_func func, - void *arg); + void *user_data); -PyAPI_FUNC(size_t) _Py_hashtable_size(_Py_hashtable_t *ht); +PyAPI_FUNC(size_t) _Py_hashtable_size(const _Py_hashtable_t *ht); /* Add a new entry to the hash. The key must not be present in the hash table. 
- Return 0 on success, -1 on memory error. - - Don't call directly this function, - but use _Py_HASHTABLE_SET() and _Py_HASHTABLE_SET_NODATA() macros */ + Return 0 on success, -1 on memory error. */ PyAPI_FUNC(int) _Py_hashtable_set( _Py_hashtable_t *ht, const void *key, - size_t data_size, - const void *data); - -#define _Py_HASHTABLE_SET(TABLE, KEY, DATA) \ - _Py_hashtable_set(TABLE, (KEY), sizeof(DATA), &(DATA)) - -#define _Py_HASHTABLE_SET_NODATA(TABLE, KEY) \ - _Py_hashtable_set(TABLE, (KEY), 0, NULL) + void *value); /* Get an entry. - Return NULL if the key does not exist. - - Don't call directly this function, but use _Py_HASHTABLE_GET_ENTRY() - macro */ + Return NULL if the key does not exist. */ static inline _Py_hashtable_entry_t * _Py_hashtable_get_entry(_Py_hashtable_t *ht, const void *key) { return ht->get_entry_func(ht, key); } -#define _Py_HASHTABLE_GET_ENTRY(TABLE, KEY) \ - _Py_hashtable_get_entry(TABLE, (const void *)(KEY)) +/* Get value from an entry. + Return NULL if the entry is not found. -/* Get data from an entry. Copy entry data into data and return 1 if the entry - exists, return 0 if the entry does not exist. + Use _Py_hashtable_get_entry() to distinguish entry value equal to NULL + and entry not found. */ +extern void *_Py_hashtable_get(_Py_hashtable_t *ht, const void *key); - Don't call directly this function, but use _Py_HASHTABLE_GET() macro */ -static inline int -_Py_hashtable_get(_Py_hashtable_t *ht, const void *key, - size_t data_size, void *data) -{ - assert(data_size == ht->data_size); - return ht->get_func(ht, key, data); -} - -#define _Py_HASHTABLE_GET(TABLE, KEY, DATA) \ - _Py_hashtable_get(TABLE, (KEY), sizeof(DATA), &(DATA)) - -/* Don't call directly this function, but use _Py_HASHTABLE_POP() macro */ -PyAPI_FUNC(int) _Py_hashtable_pop( +// Remove a key and its associated value without calling key and value destroy +// functions. +// Return the removed value if the key was found. +// Return NULL if the key was not found. 
+PyAPI_FUNC(void*) _Py_hashtable_steal( _Py_hashtable_t *ht, - const void *key, - size_t data_size, - void *data); - -#define _Py_HASHTABLE_POP(TABLE, KEY, DATA) \ - _Py_hashtable_pop(TABLE, (KEY), sizeof(DATA), &(DATA)) + const void *key); #ifdef __cplusplus diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index a42349a8e47192..4522d1afde9089 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -23,8 +23,8 @@ static void raw_free(void *ptr); # define TRACE_DEBUG #endif -#define TO_PTR(key) ((const void *)(uintptr_t)key) -#define FROM_PTR(key) ((uintptr_t)key) +#define TO_PTR(key) ((const void *)(uintptr_t)(key)) +#define FROM_PTR(key) ((uintptr_t)(key)) /* Protected by the GIL */ static struct { @@ -236,15 +236,15 @@ hashtable_hash_uint(const void *key_raw) static _Py_hashtable_t * -hashtable_new(size_t data_size, - _Py_hashtable_hash_func hash_func, +hashtable_new(_Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, - _Py_hashtable_value_destroy_func value_destroy_fun) + _Py_hashtable_destroy_func key_destroy_func, + _Py_hashtable_destroy_func value_destroy_func) { _Py_hashtable_allocator_t hashtable_alloc = {malloc, free}; - return _Py_hashtable_new_full(data_size, 0, - hash_func, compare_func, - NULL, value_destroy_fun, &hashtable_alloc); + return _Py_hashtable_new_full(hash_func, compare_func, + key_destroy_func, value_destroy_func, + &hashtable_alloc); } @@ -340,7 +340,7 @@ tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame) /* intern the filename */ _Py_hashtable_entry_t *entry; - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_filenames, filename); + entry = _Py_hashtable_get_entry(tracemalloc_filenames, filename); if (entry != NULL) { filename = (PyObject *)entry->key; } @@ -348,7 +348,7 @@ tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame) /* tracemalloc_filenames is responsible to keep a reference to the filename */ Py_INCREF(filename); - if 
(_Py_HASHTABLE_SET_NODATA(tracemalloc_filenames, filename) < 0) { + if (_Py_hashtable_set(tracemalloc_filenames, filename, NULL) < 0) { Py_DECREF(filename); #ifdef TRACE_DEBUG tracemalloc_error("failed to intern the filename"); @@ -435,7 +435,7 @@ traceback_new(void) traceback->hash = traceback_hash(traceback); /* intern the traceback */ - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_tracebacks, traceback); + entry = _Py_hashtable_get_entry(tracemalloc_tracebacks, traceback); if (entry != NULL) { traceback = (traceback_t *)entry->key; } @@ -454,7 +454,7 @@ traceback_new(void) } memcpy(copy, traceback, traceback_size); - if (_Py_HASHTABLE_SET_NODATA(tracemalloc_tracebacks, copy) < 0) { + if (_Py_hashtable_set(tracemalloc_tracebacks, copy, NULL) < 0) { raw_free(copy); #ifdef TRACE_DEBUG tracemalloc_error("failed to intern the traceback: putdata failed"); @@ -467,50 +467,22 @@ traceback_new(void) } -static void -tracemalloc_destroy_trace_cb(_Py_hashtable_t *traces, - _Py_hashtable_entry_t *entry) -{ - trace_t *trace; - _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); - raw_free(trace); -} - - static _Py_hashtable_t* tracemalloc_create_traces_table(void) { - return hashtable_new(sizeof(trace_t*), - _Py_hashtable_hash_ptr, + return hashtable_new(_Py_hashtable_hash_ptr, _Py_hashtable_compare_direct, - tracemalloc_destroy_trace_cb); -} - - -static void -tracemalloc_destroy_domain_table(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry) -{ - _Py_hashtable_t *traces; - _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); - _Py_hashtable_destroy(traces); + NULL, raw_free); } static _Py_hashtable_t* tracemalloc_create_domains_table(void) { - return hashtable_new(sizeof(_Py_hashtable_t *), - hashtable_hash_uint, + return hashtable_new(hashtable_hash_uint, _Py_hashtable_compare_direct, - tracemalloc_destroy_domain_table); -} - - -static void -tracemalloc_destroy_domains(_Py_hashtable_t *domains) -{ - _Py_hashtable_destroy(domains); + NULL, + 
(_Py_hashtable_destroy_func)_Py_hashtable_destroy); } @@ -521,9 +493,7 @@ tracemalloc_get_traces_table(unsigned int domain) return tracemalloc_traces; } else { - _Py_hashtable_t *traces = NULL; - (void)_Py_HASHTABLE_GET(tracemalloc_domains, TO_PTR(domain), traces); - return traces; + return _Py_hashtable_get(tracemalloc_domains, TO_PTR(domain)); } } @@ -538,8 +508,8 @@ tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) return; } - trace_t *trace; - if (!_Py_HASHTABLE_POP(traces, TO_PTR(ptr), trace)) { + trace_t *trace = _Py_hashtable_steal(traces, TO_PTR(ptr)); + if (!trace) { return; } assert(tracemalloc_traced_memory >= trace->size); @@ -569,17 +539,15 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, return -1; } - if (_Py_HASHTABLE_SET(tracemalloc_domains, TO_PTR(domain), traces) < 0) { + if (_Py_hashtable_set(tracemalloc_domains, TO_PTR(domain), traces) < 0) { _Py_hashtable_destroy(traces); return -1; } } - _Py_hashtable_entry_t* entry = _Py_HASHTABLE_GET_ENTRY(traces, ptr); - if (entry != NULL) { + trace_t *trace = _Py_hashtable_get(traces, TO_PTR(ptr)); + if (trace != NULL) { /* the memory block is already tracked */ - trace_t *trace; - _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); assert(tracemalloc_traced_memory >= trace->size); tracemalloc_traced_memory -= trace->size; @@ -587,14 +555,14 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, trace->traceback = traceback; } else { - trace_t *trace = raw_malloc(sizeof(trace_t)); + trace = raw_malloc(sizeof(trace_t)); if (trace == NULL) { return -1; } trace->size = size; trace->traceback = traceback; - int res = _Py_HASHTABLE_SET(traces, TO_PTR(ptr), trace); + int res = _Py_hashtable_set(traces, TO_PTR(ptr), trace); if (res != 0) { raw_free(trace); return res; @@ -860,23 +828,11 @@ tracemalloc_raw_realloc(void *ctx, void *ptr, size_t new_size) #endif /* TRACE_RAW_MALLOC */ -static int -tracemalloc_clear_filename(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, - void 
*user_data) +static void +tracemalloc_clear_filename(void *value) { - PyObject *filename = (PyObject *)entry->key; + PyObject *filename = (PyObject *)value; Py_DECREF(filename); - return 0; -} - - -static int -traceback_free_traceback(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, - void *user_data) -{ - traceback_t *traceback = (traceback_t *)entry->key; - raw_free(traceback); - return 0; } @@ -894,10 +850,8 @@ tracemalloc_clear_traces(void) tracemalloc_peak_traced_memory = 0; TABLES_UNLOCK(); - _Py_hashtable_foreach(tracemalloc_tracebacks, traceback_free_traceback, NULL); _Py_hashtable_clear(tracemalloc_tracebacks); - _Py_hashtable_foreach(tracemalloc_filenames, tracemalloc_clear_filename, NULL); _Py_hashtable_clear(tracemalloc_filenames); } @@ -937,15 +891,13 @@ tracemalloc_init(void) } #endif - tracemalloc_filenames = hashtable_new(0, - hashtable_hash_pyobject, + tracemalloc_filenames = hashtable_new(hashtable_hash_pyobject, hashtable_compare_unicode, - NULL); + tracemalloc_clear_filename, NULL); - tracemalloc_tracebacks = hashtable_new(0, - hashtable_hash_traceback, + tracemalloc_tracebacks = hashtable_new(hashtable_hash_traceback, hashtable_compare_traceback, - NULL); + NULL, raw_free); tracemalloc_traces = tracemalloc_create_traces_table(); tracemalloc_domains = tracemalloc_create_domains_table(); @@ -983,7 +935,7 @@ tracemalloc_deinit(void) tracemalloc_stop(); /* destroy hash tables */ - tracemalloc_destroy_domains(tracemalloc_domains); + _Py_hashtable_destroy(tracemalloc_domains); _Py_hashtable_destroy(tracemalloc_traces); _Py_hashtable_destroy(tracemalloc_tracebacks); _Py_hashtable_destroy(tracemalloc_filenames); @@ -1153,11 +1105,11 @@ frame_to_pyobject(frame_t *frame) static PyObject* traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) { - int i; - PyObject *frames, *frame; + PyObject *frames; if (intern_table != NULL) { - if (_Py_HASHTABLE_GET(intern_table, (const void *)traceback, frames)) { + frames = 
_Py_hashtable_get(intern_table, (const void *)traceback); + if (frames) { Py_INCREF(frames); return frames; } @@ -1167,8 +1119,8 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) if (frames == NULL) return NULL; - for (i=0; i < traceback->nframe; i++) { - frame = frame_to_pyobject(&traceback->frames[i]); + for (int i=0; i < traceback->nframe; i++) { + PyObject *frame = frame_to_pyobject(&traceback->frames[i]); if (frame == NULL) { Py_DECREF(frames); return NULL; @@ -1177,7 +1129,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) } if (intern_table != NULL) { - if (_Py_HASHTABLE_SET(intern_table, traceback, frames) < 0) { + if (_Py_hashtable_set(intern_table, traceback, frames) < 0) { Py_DECREF(frames); PyErr_NoMemory(); return NULL; @@ -1190,7 +1142,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) static PyObject* -trace_to_pyobject(unsigned int domain, trace_t *trace, +trace_to_pyobject(unsigned int domain, const trace_t *trace, _Py_hashtable_t *intern_tracebacks) { PyObject *trace_obj = NULL; @@ -1243,20 +1195,19 @@ typedef struct { static int tracemalloc_copy_trace(_Py_hashtable_t *traces, - _Py_hashtable_entry_t *entry, - void *traces2_raw) + const void *key, const void *value, + void *user_data) { - _Py_hashtable_t *traces2 = (_Py_hashtable_t *)traces2_raw; + _Py_hashtable_t *traces2 = (_Py_hashtable_t *)user_data; - trace_t *trace; - _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); + trace_t *trace = (trace_t *)value; trace_t *trace2 = raw_malloc(sizeof(trace_t)); if (traces2 == NULL) { return -1; } *trace2 = *trace; - if (_Py_HASHTABLE_SET(traces2, entry->key, trace2) < 0) { + if (_Py_hashtable_set(traces2, key, trace2) < 0) { raw_free(trace2); return -1; } @@ -1285,17 +1236,16 @@ tracemalloc_copy_traces(_Py_hashtable_t *traces) static int tracemalloc_copy_domain(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, - void *domains2_raw) + const void *key, 
const void *value, + void *user_data) { - _Py_hashtable_t *domains2 = (_Py_hashtable_t *)domains2_raw; + _Py_hashtable_t *domains2 = (_Py_hashtable_t *)user_data; - unsigned int domain = (unsigned int)FROM_PTR(entry->key); - _Py_hashtable_t *traces; - _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + unsigned int domain = (unsigned int)FROM_PTR(key); + _Py_hashtable_t *traces = (_Py_hashtable_t *)value; _Py_hashtable_t *traces2 = tracemalloc_copy_traces(traces); - if (_Py_HASHTABLE_SET(domains2, TO_PTR(domain), traces2) < 0) { + if (_Py_hashtable_set(domains2, TO_PTR(domain), traces2) < 0) { _Py_hashtable_destroy(traces2); return -1; } @@ -1323,24 +1273,25 @@ tracemalloc_copy_domains(_Py_hashtable_t *domains) static int -tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entry, +tracemalloc_get_traces_fill(_Py_hashtable_t *traces, + const void *key, const void *value, void *user_data) { get_traces_t *get_traces = user_data; - trace_t *trace; - PyObject *tracemalloc_obj; - int res; - _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); + const trace_t *trace = (const trace_t *)value; - tracemalloc_obj = trace_to_pyobject(get_traces->domain, trace, get_traces->tracebacks); - if (tracemalloc_obj == NULL) + PyObject *tuple = trace_to_pyobject(get_traces->domain, trace, + get_traces->tracebacks); + if (tuple == NULL) { return 1; + } - res = PyList_Append(get_traces->list, tracemalloc_obj); - Py_DECREF(tracemalloc_obj); - if (res < 0) + int res = PyList_Append(get_traces->list, tuple); + Py_DECREF(tuple); + if (res < 0) { return 1; + } return 0; } @@ -1348,14 +1299,13 @@ tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entr static int tracemalloc_get_traces_domain(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, + const void *key, const void *value, void *user_data) { get_traces_t *get_traces = user_data; - unsigned int domain = (unsigned int)FROM_PTR(entry->key); - _Py_hashtable_t *traces; - 
_Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); + unsigned int domain = (unsigned int)FROM_PTR(key); + _Py_hashtable_t *traces = (_Py_hashtable_t *)value; get_traces->domain = domain; return _Py_hashtable_foreach(traces, @@ -1365,11 +1315,9 @@ tracemalloc_get_traces_domain(_Py_hashtable_t *domains, static void -tracemalloc_pyobject_decref_cb(_Py_hashtable_t *tracebacks, - _Py_hashtable_entry_t *entry) +tracemalloc_pyobject_decref(void *value) { - PyObject *obj; - _Py_HASHTABLE_ENTRY_READ_DATA(tracebacks, entry, obj); + PyObject *obj = (PyObject *)value; Py_DECREF(obj); } @@ -1404,10 +1352,9 @@ _tracemalloc__get_traces_impl(PyObject *module) /* the traceback hash table is used temporarily to intern traceback tuple of (filename, lineno) tuples */ - get_traces.tracebacks = hashtable_new(sizeof(PyObject *), - _Py_hashtable_hash_ptr, + get_traces.tracebacks = hashtable_new(_Py_hashtable_hash_ptr, _Py_hashtable_compare_direct, - tracemalloc_pyobject_decref_cb); + NULL, tracemalloc_pyobject_decref); if (get_traces.tracebacks == NULL) { goto no_memory; } @@ -1462,7 +1409,7 @@ _tracemalloc__get_traces_impl(PyObject *module) _Py_hashtable_destroy(get_traces.traces); } if (get_traces.domains != NULL) { - tracemalloc_destroy_domains(get_traces.domains); + _Py_hashtable_destroy(get_traces.domains); } return get_traces.list; @@ -1472,23 +1419,22 @@ _tracemalloc__get_traces_impl(PyObject *module) static traceback_t* tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) { - trace_t *trace; - int found; if (!_Py_tracemalloc_config.tracing) return NULL; + trace_t *trace; TABLES_LOCK(); _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); if (traces) { - found = _Py_HASHTABLE_GET(traces, TO_PTR(ptr), trace); + trace = _Py_hashtable_get(traces, TO_PTR(ptr)); } else { - found = 0; + trace = NULL; } TABLES_UNLOCK(); - if (!found) { + if (!trace) { return NULL; } @@ -1634,12 +1580,10 @@ _tracemalloc_get_traceback_limit_impl(PyObject *module) static int 
tracemalloc_get_tracemalloc_memory_cb(_Py_hashtable_t *domains, - _Py_hashtable_entry_t *entry, + const void *key, const void *value, void *user_data) { - _Py_hashtable_t *traces; - _Py_HASHTABLE_ENTRY_READ_DATA(domains, entry, traces); - + const _Py_hashtable_t *traces = value; size_t *size = (size_t*)user_data; *size += _Py_hashtable_size(traces); return 0; @@ -1827,14 +1771,11 @@ _PyTraceMalloc_NewReference(PyObject *op) int res = -1; TABLES_LOCK(); - _Py_hashtable_entry_t* entry; - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); - if (entry != NULL) { + trace_t *trace = _Py_hashtable_get(tracemalloc_traces, TO_PTR(ptr)); + if (trace != NULL) { /* update the traceback of the memory block */ traceback_t *traceback = traceback_new(); if (traceback != NULL) { - trace_t *trace; - _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace); trace->traceback = traceback; res = 0; } diff --git a/Python/hashtable.c b/Python/hashtable.c index e7681fb1565198..dc4af3395181cd 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -58,22 +58,6 @@ ((_Py_hashtable_entry_t *)_Py_SLIST_HEAD(&(HT)->buckets[BUCKET])) #define ENTRY_NEXT(ENTRY) \ ((_Py_hashtable_entry_t *)_Py_SLIST_ITEM_NEXT(ENTRY)) -#define HASHTABLE_ITEM_SIZE(HT) \ - (sizeof(_Py_hashtable_entry_t) + (HT)->data_size) - -#define ENTRY_READ_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ - do { \ - assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((PDATA), _Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ - (DATA_SIZE)); \ - } while (0) - -#define ENTRY_WRITE_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ - do { \ - assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA(ENTRY), \ - (PDATA), (DATA_SIZE)); \ - } while (0) /* Forward declaration */ static void hashtable_rehash(_Py_hashtable_t *ht); @@ -133,7 +117,7 @@ round_size(size_t s) size_t -_Py_hashtable_size(_Py_hashtable_t *ht) +_Py_hashtable_size(const _Py_hashtable_t *ht) { size_t size; @@ -143,7 +127,7 @@ 
_Py_hashtable_size(_Py_hashtable_t *ht) size += ht->num_buckets * sizeof(_Py_hashtable_entry_t *); /* entries */ - size += ht->entries * HASHTABLE_ITEM_SIZE(ht); + size += ht->entries * sizeof(_Py_hashtable_entry_t); return size; } @@ -209,11 +193,9 @@ _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) } -static int -_Py_hashtable_pop_entry(_Py_hashtable_t *ht, const void *key, - void *data, size_t data_size) +void* +_Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) { - Py_uhash_t key_hash = ht->hash_func(key); size_t index = key_hash & (ht->num_buckets - 1); @@ -222,7 +204,7 @@ _Py_hashtable_pop_entry(_Py_hashtable_t *ht, const void *key, while (1) { if (entry == NULL) { // not found - return 0; + return NULL; } if (entry->key_hash == key_hash && ht->compare_func(key, entry->key)) { break; @@ -235,23 +217,21 @@ _Py_hashtable_pop_entry(_Py_hashtable_t *ht, const void *key, (_Py_slist_item_t *)entry); ht->entries--; - if (data != NULL) - ENTRY_READ_PDATA(ht, entry, data_size, data); + void *value = entry->value; ht->alloc.free(entry); - if ((float)ht->entries / (float)ht->num_buckets < HASHTABLE_LOW) + if ((float)ht->entries / (float)ht->num_buckets < HASHTABLE_LOW) { hashtable_rehash(ht); - return 1; + } + return value; } int -_Py_hashtable_set(_Py_hashtable_t *ht, const void *key, - size_t data_size, const void *data) +_Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value) { _Py_hashtable_entry_t *entry; - assert(data != NULL || data_size == 0); #ifndef NDEBUG /* Don't write the assertion on a single line because it is interesting to know the duplicated entry if the assertion failed. 
The entry can @@ -263,7 +243,7 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, Py_uhash_t key_hash = ht->hash_func(key); size_t index = key_hash & (ht->num_buckets - 1); - entry = ht->alloc.malloc(HASHTABLE_ITEM_SIZE(ht)); + entry = ht->alloc.malloc(sizeof(_Py_hashtable_entry_t)); if (entry == NULL) { /* memory allocation failed */ return -1; @@ -271,9 +251,7 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, entry->key_hash = key_hash; entry->key = (void *)key; - if (data) { - ENTRY_WRITE_PDATA(ht, entry, data_size, data); - } + entry->value = value; _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); ht->entries++; @@ -284,17 +262,15 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, } -int -_Py_hashtable_get_generic(_Py_hashtable_t *ht, const void *key, void *data) +void* +_Py_hashtable_get(_Py_hashtable_t *ht, const void *key) { - assert(data != NULL); _Py_hashtable_entry_t *entry = ht->get_entry_func(ht, key); if (entry != NULL) { - ENTRY_READ_PDATA(ht, entry, ht->data_size, data); - return 1; + return entry->value; } else { - return 0; + return NULL; } } @@ -323,44 +299,17 @@ _Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) } -// Specialized for: -// hash_func == _Py_hashtable_hash_ptr -// compare_func == _Py_hashtable_compare_direct -int -_Py_hashtable_get_ptr(_Py_hashtable_t *ht, const void *key, void *data) -{ - assert(data != NULL); - _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry_ptr(ht, key); - if (entry != NULL) { - ENTRY_READ_PDATA(ht, entry, ht->data_size, data); - return 1; - } - else { - return 0; - } -} - - -int -_Py_hashtable_pop(_Py_hashtable_t *ht, const void *key, - size_t data_size, void *data) -{ - assert(data != NULL); - return _Py_hashtable_pop_entry(ht, key, data, data_size); -} - - int _Py_hashtable_foreach(_Py_hashtable_t *ht, _Py_hashtable_foreach_func func, - void *arg) + void *user_data) { _Py_hashtable_entry_t *entry; size_t hv; for (hv = 0; hv < ht->num_buckets; 
hv++) { for (entry = TABLE_HEAD(ht, hv); entry; entry = ENTRY_NEXT(entry)) { - int res = func(ht, entry, arg); + int res = func(ht, entry->key, entry->value, user_data); if (res) return res; } @@ -414,11 +363,10 @@ hashtable_rehash(_Py_hashtable_t *ht) _Py_hashtable_t * -_Py_hashtable_new_full(size_t data_size, size_t init_size, - _Py_hashtable_hash_func hash_func, +_Py_hashtable_new_full(_Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func, _Py_hashtable_destroy_func key_destroy_func, - _Py_hashtable_value_destroy_func value_destroy_func, + _Py_hashtable_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator) { _Py_hashtable_t *ht; @@ -437,9 +385,8 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, if (ht == NULL) return ht; - ht->num_buckets = round_size(init_size); + ht->num_buckets = HASHTABLE_MIN_SIZE; ht->entries = 0; - ht->data_size = data_size; buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); ht->buckets = alloc.malloc(buckets_size); @@ -449,7 +396,6 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, } memset(ht->buckets, 0, buckets_size); - ht->get_func = _Py_hashtable_get_generic; ht->get_entry_func = _Py_hashtable_get_entry_generic; ht->hash_func = hash_func; ht->compare_func = compare_func; @@ -459,7 +405,6 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, if (ht->hash_func == _Py_hashtable_hash_ptr && ht->compare_func == _Py_hashtable_compare_direct) { - ht->get_func = _Py_hashtable_get_ptr; ht->get_entry_func = _Py_hashtable_get_entry_ptr; } return ht; @@ -467,16 +412,27 @@ _Py_hashtable_new_full(size_t data_size, size_t init_size, _Py_hashtable_t * -_Py_hashtable_new(size_t data_size, - _Py_hashtable_hash_func hash_func, +_Py_hashtable_new(_Py_hashtable_hash_func hash_func, _Py_hashtable_compare_func compare_func) { - return _Py_hashtable_new_full(data_size, HASHTABLE_MIN_SIZE, - hash_func, compare_func, + return _Py_hashtable_new_full(hash_func, compare_func, NULL, NULL, 
NULL); } +static void +_Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry) +{ + if (ht->key_destroy_func) { + ht->key_destroy_func(entry->key); + } + if (ht->value_destroy_func) { + ht->value_destroy_func(entry->value); + } + ht->alloc.free(entry); +} + + void _Py_hashtable_clear(_Py_hashtable_t *ht) { @@ -486,7 +442,7 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) for (i=0; i < ht->num_buckets; i++) { for (entry = TABLE_HEAD(ht, i); entry != NULL; entry = next) { next = ENTRY_NEXT(entry); - ht->alloc.free(entry); + _Py_hashtable_destroy_entry(ht, entry); } _Py_slist_init(&ht->buckets[i]); } @@ -495,19 +451,6 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) } -static void -_Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry) -{ - if (ht->key_destroy_func) { - ht->key_destroy_func(entry->key); - } - if (ht->value_destroy_func) { - ht->value_destroy_func(ht, entry); - } - ht->alloc.free(entry); -} - - void _Py_hashtable_destroy(_Py_hashtable_t *ht) { diff --git a/Python/marshal.c b/Python/marshal.c index 7c99c1ee13c0ee..b096ff89322209 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -302,10 +302,10 @@ w_ref(PyObject *v, char *flag, WFILE *p) if (Py_REFCNT(v) == 1) return 0; - entry = _Py_HASHTABLE_GET_ENTRY(p->hashtable, v); + entry = _Py_hashtable_get_entry(p->hashtable, v); if (entry != NULL) { /* write the reference index to the stream */ - _Py_HASHTABLE_ENTRY_READ_DATA(p->hashtable, entry, w); + w = (int)(uintptr_t)entry->value; /* we don't store "long" indices in the dict */ assert(0 <= w && w <= 0x7fffffff); w_byte(TYPE_REF, p); @@ -320,7 +320,7 @@ w_ref(PyObject *v, char *flag, WFILE *p) } w = (int)s; Py_INCREF(v); - if (_Py_HASHTABLE_SET(p->hashtable, v, w) < 0) { + if (_Py_hashtable_set(p->hashtable, v, (void *)(uintptr_t)w) < 0) { Py_DECREF(v); goto err; } @@ -556,8 +556,7 @@ static int w_init_refs(WFILE *wf, int version) { if (version >= 3) { - wf->hashtable = _Py_hashtable_new_full(sizeof(int), 0, - 
_Py_hashtable_hash_ptr, + wf->hashtable = _Py_hashtable_new_full(_Py_hashtable_hash_ptr, _Py_hashtable_compare_direct, w_decref_entry, NULL, NULL); if (wf->hashtable == NULL) { From 42bae3a3d9d79f28e6b3b619bd27296d125c4c2c Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Wed, 13 May 2020 05:36:23 +0200 Subject: [PATCH 083/115] bpo-40602: Optimize _Py_hashtable_get_ptr() (GH-20066) _Py_hashtable_get_entry_ptr() avoids comparing the entry hash: compare directly keys. Move _Py_hashtable_get_entry_ptr() just after _Py_hashtable_get_entry_generic(). --- Python/hashtable.c | 47 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/Python/hashtable.c b/Python/hashtable.c index dc4af3395181cd..d1467ad94ed55c 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -193,6 +193,29 @@ _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) } +// Specialized for: +// hash_func == _Py_hashtable_hash_ptr +// compare_func == _Py_hashtable_compare_direct +static _Py_hashtable_entry_t * +_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) +{ + Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); + size_t index = key_hash & (ht->num_buckets - 1); + _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + while (1) { + if (entry == NULL) { + return NULL; + } + // Compare directly keys (ignore entry->key_hash) + if (entry->key == key) { + break; + } + entry = ENTRY_NEXT(entry); + } + return entry; +} + + void* _Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) { @@ -275,30 +298,6 @@ _Py_hashtable_get(_Py_hashtable_t *ht, const void *key) } -// Specialized for: -// hash_func == _Py_hashtable_hash_ptr -// compare_func == _Py_hashtable_compare_direct -_Py_hashtable_entry_t * -_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) -{ - Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); - size_t index = key_hash & (ht->num_buckets - 1); - _Py_hashtable_entry_t *entry = entry = 
TABLE_HEAD(ht, index); - while (1) { - if (entry == NULL) { - return NULL; - } - if (entry->key_hash == key_hash) { - if (entry->key == key) { - break; - } - } - entry = ENTRY_NEXT(entry); - } - return entry; -} - - int _Py_hashtable_foreach(_Py_hashtable_t *ht, _Py_hashtable_foreach_func func, From b809717c1ead26b4e3693b8a5505dd8f8f666f08 Mon Sep 17 00:00:00 2001 From: Tzanetos Balitsaris Date: Wed, 13 May 2020 13:29:31 +0300 Subject: [PATCH 084/115] bpo-40331: Increase test coverage for the statistics module (GH-19608) --- Lib/test/test_statistics.py | 60 +++++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 0e46a7119f0efc..5c3b1fdd8b110d 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -1004,6 +1004,10 @@ def test_nan(self): x = statistics._convert(nan, type(nan)) self.assertTrue(_nan_equal(x, nan)) + def test_invalid_input_type(self): + with self.assertRaises(TypeError): + statistics._convert(None, float) + class FailNegTest(unittest.TestCase): """Test _fail_neg private function.""" @@ -1033,6 +1037,50 @@ def test_error_msg(self): self.assertEqual(errmsg, msg) +class FindLteqTest(unittest.TestCase): + # Test _find_lteq private function. + + def test_invalid_input_values(self): + for a, x in [ + ([], 1), + ([1, 2], 3), + ([1, 3], 2) + ]: + with self.subTest(a=a, x=x): + with self.assertRaises(ValueError): + statistics._find_lteq(a, x) + + def test_locate_successfully(self): + for a, x, expected_i in [ + ([1, 1, 1, 2, 3], 1, 0), + ([0, 1, 1, 1, 2, 3], 1, 1), + ([1, 2, 3, 3, 3], 3, 2) + ]: + with self.subTest(a=a, x=x): + self.assertEqual(expected_i, statistics._find_lteq(a, x)) + + +class FindRteqTest(unittest.TestCase): + # Test _find_rteq private function. 
+ + def test_invalid_input_values(self): + for a, l, x in [ + ([1], 2, 1), + ([1, 3], 0, 2) + ]: + with self.assertRaises(ValueError): + statistics._find_rteq(a, l, x) + + def test_locate_successfully(self): + for a, l, x, expected_i in [ + ([1, 1, 1, 2, 3], 0, 1, 2), + ([0, 1, 1, 1, 2, 3], 0, 1, 3), + ([1, 2, 3, 3, 3], 0, 3, 4) + ]: + with self.subTest(a=a, l=l, x=x): + self.assertEqual(expected_i, statistics._find_rteq(a, l, x)) + + # === Tests for public functions === class UnivariateCommonMixin: @@ -1476,6 +1524,18 @@ def test_negative_error(self): with self.subTest(values=values): self.assertRaises(exc, self.func, values) + def test_invalid_type_error(self): + # Test error is raised when input contains invalid type(s) + for data in [ + ['3.14'], # single string + ['1', '2', '3'], # multiple strings + [1, '2', 3, '4', 5], # mixed strings and valid integers + [2.3, 3.4, 4.5, '5.6'] # only one string and valid floats + ]: + with self.subTest(data=data): + with self.assertRaises(TypeError): + self.func(data) + def test_ints(self): # Test harmonic mean with ints. 
data = [2, 4, 4, 8, 16, 16] From fa0a66e62d087765dbc5c1b89d6149a23ecfb0a6 Mon Sep 17 00:00:00 2001 From: Dong-hee Na Date: Wed, 13 May 2020 22:38:27 +0900 Subject: [PATCH 085/115] bpo-40613: Remove compiler warning from _xxsubinterpretersmodule (GH-20069) --- Modules/_xxsubinterpretersmodule.c | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 9c5df16e156a1d..18dd8918e7c89a 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -329,6 +329,7 @@ _objsnapshot_clear(_objsnapshot *osn) // PyMem_Free(osn); //} +#ifndef NDEBUG static int _objsnapshot_is_clear(_objsnapshot *osn) { @@ -336,6 +337,7 @@ _objsnapshot_is_clear(_objsnapshot *osn) && _rawstring_is_clear(&osn->modname) && _rawstring_is_clear(&osn->clsname); } +#endif static void _objsnapshot_summarize(_objsnapshot *osn, _rawstring *rawbuf, const char *msg) @@ -597,6 +599,7 @@ _tbsnapshot_free(_tbsnapshot *tbs) PyMem_Free(tbs); } +#ifndef NDEBUG static int _tbsnapshot_is_clear(_tbsnapshot *tbs) { @@ -604,6 +607,7 @@ _tbsnapshot_is_clear(_tbsnapshot *tbs) && _rawstring_is_clear(&tbs->tbs_funcname) && _rawstring_is_clear(&tbs->tbs_filename); } +#endif static int _tbsnapshot_from_pytb(_tbsnapshot *tbs, PyTracebackObject *pytb) @@ -748,6 +752,7 @@ _excsnapshot_free(_excsnapshot *es) PyMem_Free(es); } +#ifndef NDEBUG static int _excsnapshot_is_clear(_excsnapshot *es) { @@ -758,6 +763,7 @@ _excsnapshot_is_clear(_excsnapshot *es) && es->es_msg == NULL && _objsnapshot_is_clear(&es->es_object); } +#endif static PyObject * _excsnapshot_get_exc_naive(_excsnapshot *es) @@ -1085,6 +1091,7 @@ _sharedexception_free(_sharedexception *she) PyMem_Free(she); } +#ifndef NDEBUG static int _sharedexception_is_clear(_sharedexception *she) { @@ -1092,6 +1099,7 @@ _sharedexception_is_clear(_sharedexception *she) && _excsnapshot_is_clear(&she->snapshot) && _rawstring_is_clear(&she->msg); } +#endif static PyObject * 
_sharedexception_get_cause(_sharedexception *sharedexc) From de92769d473d1c0955d36da2fc71462621326f00 Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Wed, 13 May 2020 20:55:12 +0200 Subject: [PATCH 086/115] bpo-34790: add version of removal of explicit passing of coros to `asyncio.wait`'s documentation (#20008) --- Doc/library/asyncio-task.rst | 2 +- .../next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 6627bec79823a3..42e2b4e2fc5b91 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -575,7 +575,7 @@ Waiting Primitives if task in done: # Everything will work as expected now. - .. deprecated:: 3.8 + .. deprecated-removed:: 3.8 3.11 Passing coroutine objects to ``wait()`` directly is deprecated. diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst new file mode 100644 index 00000000000000..4f349adff33460 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst @@ -0,0 +1 @@ +Add version of removal for explicit passing of coros to `asyncio.wait()`'s documentation \ No newline at end of file From a15c9b3a0524e5ca0434d2ad11076677824af941 Mon Sep 17 00:00:00 2001 From: Lysandros Nikolaou Date: Wed, 13 May 2020 22:36:27 +0300 Subject: [PATCH 087/115] bpo-40334: Always show the caret on SyntaxErrors (GH-20050) This commit fixes SyntaxError locations when the caret is not displayed, by doing the following: - `col_number` always gets set to the location of the offending node/expr. When no caret is to be displayed, this gets achieved by setting the object holding the error line to None. 
- Introduce a new function `_PyPegen_raise_error_known_location`, which can be called, when an arbitrary `lineno`/`col_offset` needs to be passed. This function then gets used in the grammar (through some new macros and inline functions) so that SyntaxError locations of the new parser match that of the old. --- Grammar/python.gram | 29 +-- Lib/test/test_exceptions.py | 15 +- Parser/pegen/parse.c | 355 ++++++++++++++++++++++-------------- Parser/pegen/pegen.c | 61 ++++--- Parser/pegen/pegen.h | 25 ++- 5 files changed, 293 insertions(+), 192 deletions(-) diff --git a/Grammar/python.gram b/Grammar/python.gram index 0542107cac3e6b..84c89330e3ee9d 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -623,26 +623,31 @@ t_atom[expr_ty]: # From here on, there are rules for invalid syntax with specialised error messages incorrect_arguments: | args ',' '*' { RAISE_SYNTAX_ERROR("iterable argument unpacking follows keyword argument unpacking") } - | expression for_if_clauses ',' [args | expression for_if_clauses] { - RAISE_SYNTAX_ERROR("Generator expression must be parenthesized") } + | a=expression for_if_clauses ',' [args | expression for_if_clauses] { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "Generator expression must be parenthesized") } | a=args ',' args { _PyPegen_arguments_parsing_error(p, a) } invalid_kwarg: - | expression '=' { RAISE_SYNTAX_ERROR("expression cannot contain assignment, perhaps you meant \"==\"?") } + | a=expression '=' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, "expression cannot contain assignment, perhaps you meant \"==\"?") } invalid_named_expression: | a=expression ':=' expression { - RAISE_SYNTAX_ERROR("cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, "cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } invalid_assignment: - | list ':' { RAISE_SYNTAX_ERROR("only single target (not list) can be annotated") } - | tuple ':' { RAISE_SYNTAX_ERROR("only single 
target (not tuple) can be annotated") } - | expression ':' expression ['=' annotated_rhs] { - RAISE_SYNTAX_ERROR("illegal target for annotation") } + | a=list ':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not list) can be annotated") } + | a=tuple ':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } + | a=star_named_expression ',' star_named_expressions* ':' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } + | a=expression ':' expression ['=' annotated_rhs] { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } | a=expression ('=' | augassign) (yield_expr | star_expressions) { - RAISE_SYNTAX_ERROR_NO_COL_OFFSET("cannot assign to %s", _PyPegen_get_expr_name(a)) } + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot assign to %s", _PyPegen_get_expr_name(a)) } invalid_block: | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") } invalid_comprehension: - | ('[' | '(' | '{') '*' expression for_if_clauses { - RAISE_SYNTAX_ERROR("iterable unpacking cannot be used in comprehension") } + | ('[' | '(' | '{') a=starred_expression for_if_clauses { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "iterable unpacking cannot be used in comprehension") } invalid_parameters: | param_no_default* (slash_with_default | param_with_default+) param_no_default { RAISE_SYNTAX_ERROR("non-default argument follows default argument") } @@ -655,4 +660,4 @@ invalid_double_type_comments: RAISE_SYNTAX_ERROR("Cannot have two type comments on def") } invalid_del_target: | a=star_expression &del_target_end { - RAISE_SYNTAX_ERROR("cannot delete %s", _PyPegen_get_expr_name(a)) } + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot delete %s", _PyPegen_get_expr_name(a)) } diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index dbd7fa6bdd9385..b689ec7aed18de 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -242,16 +242,13 @@ def 
baz(): check('from __future__ import doesnt_exist', 1, 1) check('from __future__ import braces', 1, 1) check('x=1\nfrom __future__ import division', 2, 1) - check('(yield i) = 2', 1, 1) + check('foo(1=2)', 1, 5) + check('def f():\n x, y: int', 2, 3) + check('[*x for x in xs]', 1, 2) + check('foo(x for x in range(10), 100)', 1, 5) + check('(yield i) = 2', 1, 1 if support.use_old_parser() else 2) check('def f(*):\n pass', 1, 7 if support.use_old_parser() else 8) - check('foo(1=2)', 1, 5 if support.use_old_parser() else 6) - - @support.skip_if_new_parser("Pegen column offsets might be different") - def testSyntaxErrorOffsetCustom(self): - self.check('for 1 in []: pass', 1, 5) - self.check('[*x for x in xs]', 1, 2) - self.check('def f():\n x, y: int', 2, 3) - self.check('foo(x for x in range(10), 100)', 1, 5) + check('for 1 in []: pass', 1, 5 if support.use_old_parser() else 7) @cpython_only def testSettingException(self): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index a1a6f4c06bf63d..b1b248187ea3ed 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -346,12 +346,12 @@ static KeywordToken *reserved_keywords[] = { #define _loop0_124_type 1275 #define _gather_123_type 1276 #define _tmp_125_type 1277 -#define _tmp_126_type 1278 +#define _loop0_126_type 1278 #define _tmp_127_type 1279 #define _tmp_128_type 1280 #define _tmp_129_type 1281 -#define _loop0_130_type 1282 -#define _tmp_131_type 1283 +#define _tmp_130_type 1282 +#define _loop0_131_type 1283 #define _tmp_132_type 1284 #define _tmp_133_type 1285 #define _tmp_134_type 1286 @@ -365,9 +365,10 @@ static KeywordToken *reserved_keywords[] = { #define _tmp_142_type 1294 #define _tmp_143_type 1295 #define _tmp_144_type 1296 -#define _loop1_145_type 1297 -#define _tmp_146_type 1298 +#define _tmp_145_type 1297 +#define _loop1_146_type 1298 #define _tmp_147_type 1299 +#define _tmp_148_type 1300 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -647,12 +648,12 @@ 
static asdl_seq *_gather_121_rule(Parser *p); static asdl_seq *_loop0_124_rule(Parser *p); static asdl_seq *_gather_123_rule(Parser *p); static void *_tmp_125_rule(Parser *p); -static void *_tmp_126_rule(Parser *p); +static asdl_seq *_loop0_126_rule(Parser *p); static void *_tmp_127_rule(Parser *p); static void *_tmp_128_rule(Parser *p); static void *_tmp_129_rule(Parser *p); -static asdl_seq *_loop0_130_rule(Parser *p); -static void *_tmp_131_rule(Parser *p); +static void *_tmp_130_rule(Parser *p); +static asdl_seq *_loop0_131_rule(Parser *p); static void *_tmp_132_rule(Parser *p); static void *_tmp_133_rule(Parser *p); static void *_tmp_134_rule(Parser *p); @@ -666,9 +667,10 @@ static void *_tmp_141_rule(Parser *p); static void *_tmp_142_rule(Parser *p); static void *_tmp_143_rule(Parser *p); static void *_tmp_144_rule(Parser *p); -static asdl_seq *_loop1_145_rule(Parser *p); -static void *_tmp_146_rule(Parser *p); +static void *_tmp_145_rule(Parser *p); +static asdl_seq *_loop1_146_rule(Parser *p); static void *_tmp_147_rule(Parser *p); +static void *_tmp_148_rule(Parser *p); // file: statements? 
$ @@ -10629,10 +10631,10 @@ incorrect_arguments_rule(Parser *p) Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - expr_ty expression_var; + expr_ty a; asdl_seq* for_if_clauses_var; if ( - (expression_var = expression_rule(p)) // expression + (a = expression_rule(p)) // expression && (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses && @@ -10641,7 +10643,7 @@ incorrect_arguments_rule(Parser *p) (_opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] ) { - _res = RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10687,14 +10689,14 @@ invalid_kwarg_rule(Parser *p) int _mark = p->mark; { // expression '=' Token * _literal; - expr_ty expression_var; + expr_ty a; if ( - (expression_var = expression_rule(p)) // expression + (a = expression_rule(p)) // expression && (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - _res = RAISE_SYNTAX_ERROR ( "expression cannot contain assignment, perhaps you meant \"==\"?" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "expression cannot contain assignment, perhaps you meant \"==\"?" 
); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10729,7 +10731,7 @@ invalid_named_expression_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - _res = RAISE_SYNTAX_ERROR ( "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10746,6 +10748,7 @@ invalid_named_expression_rule(Parser *p) // invalid_assignment: // | list ':' // | tuple ':' +// | star_named_expression ',' star_named_expressions* ':' // | expression ':' expression ['=' annotated_rhs] // | expression ('=' | augassign) (yield_expr | star_expressions) static void * @@ -10758,14 +10761,14 @@ invalid_assignment_rule(Parser *p) int _mark = p->mark; { // list ':' Token * _literal; - expr_ty list_var; + expr_ty a; if ( - (list_var = list_rule(p)) // list + (a = list_rule(p)) // list && (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - _res = RAISE_SYNTAX_ERROR ( "only single target (not list) can be annotated" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not list) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10776,14 +10779,38 @@ invalid_assignment_rule(Parser *p) } { // tuple ':' Token * _literal; - expr_ty tuple_var; + expr_ty a; if ( - (tuple_var = tuple_rule(p)) // tuple + (a = tuple_rule(p)) // tuple && (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - _res = RAISE_SYNTAX_ERROR ( "only single target (not tuple) can be annotated" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + { // star_named_expression ',' star_named_expressions* ':' 
+ Token * _literal; + Token * _literal_1; + asdl_seq * _loop0_126_var; + expr_ty a; + if ( + (a = star_named_expression_rule(p)) // star_named_expression + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_loop0_126_var = _loop0_126_rule(p)) // star_named_expressions* + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10796,19 +10823,19 @@ invalid_assignment_rule(Parser *p) Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; expr_ty expression_var; - expr_ty expression_var_1; if ( - (expression_var = expression_rule(p)) // expression + (a = expression_rule(p)) // expression && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (expression_var_1 = expression_rule(p)) // expression + (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_126_rule(p), 1) // ['=' annotated_rhs] + (_opt_var = _tmp_127_rule(p), 1) // ['=' annotated_rhs] ) { - _res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "illegal target for annotation" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10818,18 +10845,18 @@ invalid_assignment_rule(Parser *p) p->mark = _mark; } { // expression ('=' | augassign) (yield_expr | star_expressions) - void *_tmp_127_var; void *_tmp_128_var; + void *_tmp_129_var; expr_ty a; if ( (a = expression_rule(p)) // expression && - (_tmp_127_var = _tmp_127_rule(p)) // '=' | augassign + (_tmp_128_var = _tmp_128_rule(p)) // '=' | augassign && - (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions + (_tmp_129_var = _tmp_129_rule(p)) // yield_expr | star_expressions ) { - _res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); + _res = 
RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10874,7 +10901,7 @@ invalid_block_rule(Parser *p) return _res; } -// invalid_comprehension: ('[' | '(' | '{') '*' expression for_if_clauses +// invalid_comprehension: ('[' | '(' | '{') starred_expression for_if_clauses static void * invalid_comprehension_rule(Parser *p) { @@ -10883,22 +10910,19 @@ invalid_comprehension_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // ('[' | '(' | '{') '*' expression for_if_clauses - Token * _literal; - void *_tmp_129_var; - expr_ty expression_var; + { // ('[' | '(' | '{') starred_expression for_if_clauses + void *_tmp_130_var; + expr_ty a; asdl_seq* for_if_clauses_var; if ( - (_tmp_129_var = _tmp_129_rule(p)) // '[' | '(' | '{' - && - (_literal = _PyPegen_expect_token(p, 16)) // token='*' + (_tmp_130_var = _tmp_130_rule(p)) // '[' | '(' | '{' && - (expression_var = expression_rule(p)) // expression + (a = starred_expression_rule(p)) // starred_expression && (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - _res = RAISE_SYNTAX_ERROR ( "iterable unpacking cannot be used in comprehension" ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "iterable unpacking cannot be used in comprehension" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -10923,13 +10947,13 @@ invalid_parameters_rule(Parser *p) void * _res = NULL; int _mark = p->mark; { // param_no_default* (slash_with_default | param_with_default+) param_no_default - asdl_seq * _loop0_130_var; - void *_tmp_131_var; + asdl_seq * _loop0_131_var; + void *_tmp_132_var; arg_ty param_no_default_var; if ( - (_loop0_130_var = _loop0_130_rule(p)) // param_no_default* + (_loop0_131_var = _loop0_131_rule(p)) // param_no_default* && - (_tmp_131_var = _tmp_131_rule(p)) // slash_with_default | param_with_default+ + (_tmp_132_var = _tmp_132_rule(p)) // 
slash_with_default | param_with_default+ && (param_no_default_var = param_no_default_rule(p)) // param_no_default ) @@ -10959,11 +10983,11 @@ invalid_star_etc_rule(Parser *p) int _mark = p->mark; { // '*' (')' | ',' (')' | '**')) Token * _literal; - void *_tmp_132_var; + void *_tmp_133_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_132_var = _tmp_132_rule(p)) // ')' | ',' (')' | '**') + (_tmp_133_var = _tmp_133_rule(p)) // ')' | ',' (')' | '**') ) { _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); @@ -10991,11 +11015,11 @@ invalid_lambda_star_etc_rule(Parser *p) int _mark = p->mark; { // '*' (':' | ',' (':' | '**')) Token * _literal; - void *_tmp_133_var; + void *_tmp_134_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_133_var = _tmp_133_rule(p)) // ':' | ',' (':' | '**') + (_tmp_134_var = _tmp_134_rule(p)) // ':' | ',' (':' | '**') ) { _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); @@ -11070,7 +11094,7 @@ invalid_del_target_rule(Parser *p) _PyPegen_lookahead(1, del_target_end_rule, p) ) { - _res = RAISE_SYNTAX_ERROR ( "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -11979,12 +12003,12 @@ _loop1_22_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (star_targets '=') - void *_tmp_134_var; + void *_tmp_135_var; while ( - (_tmp_134_var = _tmp_134_rule(p)) // star_targets '=' + (_tmp_135_var = _tmp_135_rule(p)) // star_targets '=' ) { - _res = _tmp_134_var; + _res = _tmp_135_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -12306,12 +12330,12 @@ _loop0_30_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('.' 
| '...') - void *_tmp_135_var; + void *_tmp_136_var; while ( - (_tmp_135_var = _tmp_135_rule(p)) // '.' | '...' + (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' ) { - _res = _tmp_135_var; + _res = _tmp_136_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -12355,12 +12379,12 @@ _loop1_31_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('.' | '...') - void *_tmp_136_var; + void *_tmp_137_var; while ( - (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' + (_tmp_137_var = _tmp_137_rule(p)) // '.' | '...' ) { - _res = _tmp_136_var; + _res = _tmp_137_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -13937,12 +13961,12 @@ _loop1_67_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('@' named_expression NEWLINE) - void *_tmp_137_var; + void *_tmp_138_var; while ( - (_tmp_137_var = _tmp_137_rule(p)) // '@' named_expression NEWLINE + (_tmp_138_var = _tmp_138_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_137_var; + _res = _tmp_138_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -14110,12 +14134,12 @@ _loop1_71_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' star_expression) - void *_tmp_138_var; + void *_tmp_139_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) // ',' star_expression + (_tmp_139_var = _tmp_139_rule(p)) // ',' star_expression ) { - _res = _tmp_138_var; + _res = _tmp_139_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -14248,12 +14272,12 @@ _loop1_74_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' expression) - void *_tmp_139_var; + void *_tmp_140_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) // ',' 
expression + (_tmp_140_var = _tmp_140_rule(p)) // ',' expression ) { - _res = _tmp_139_var; + _res = _tmp_140_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -15015,12 +15039,12 @@ _loop1_89_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('or' conjunction) - void *_tmp_140_var; + void *_tmp_141_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) // 'or' conjunction + (_tmp_141_var = _tmp_141_rule(p)) // 'or' conjunction ) { - _res = _tmp_140_var; + _res = _tmp_141_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -15068,12 +15092,12 @@ _loop1_90_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('and' inversion) - void *_tmp_141_var; + void *_tmp_142_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) // 'and' inversion + (_tmp_142_var = _tmp_142_rule(p)) // 'and' inversion ) { - _res = _tmp_141_var; + _res = _tmp_142_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -15723,12 +15747,12 @@ _loop0_105_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('if' disjunction) - void *_tmp_142_var; + void *_tmp_143_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) // 'if' disjunction + (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction ) { - _res = _tmp_142_var; + _res = _tmp_143_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -15772,12 +15796,12 @@ _loop0_106_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // ('if' disjunction) - void *_tmp_143_var; + void *_tmp_144_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction + (_tmp_144_var = _tmp_144_rule(p)) // 'if' disjunction ) { - _res = _tmp_143_var; + _res = _tmp_144_var; if (_n == 
_children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -16225,12 +16249,12 @@ _loop0_117_rule(Parser *p) ssize_t _children_capacity = 1; ssize_t _n = 0; { // (',' star_target) - void *_tmp_144_var; + void *_tmp_145_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) // ',' star_target + (_tmp_145_var = _tmp_145_rule(p)) // ',' star_target ) { - _res = _tmp_144_var; + _res = _tmp_145_var; if (_n == _children_capacity) { _children_capacity *= 2; _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -16577,9 +16601,58 @@ _tmp_125_rule(Parser *p) return _res; } -// _tmp_126: '=' annotated_rhs +// _loop0_126: star_named_expressions +static asdl_seq * +_loop0_126_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // star_named_expressions + asdl_seq* star_named_expressions_var; + while ( + (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions + ) + { + _res = star_named_expressions_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_126"); + PyMem_Free(_children); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_126_type, _seq); + return _seq; +} + +// _tmp_127: '=' annotated_rhs static 
void * -_tmp_126_rule(Parser *p) +_tmp_127_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16605,9 +16678,9 @@ _tmp_126_rule(Parser *p) return _res; } -// _tmp_127: '=' | augassign +// _tmp_128: '=' | augassign static void * -_tmp_127_rule(Parser *p) +_tmp_128_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16641,9 +16714,9 @@ _tmp_127_rule(Parser *p) return _res; } -// _tmp_128: yield_expr | star_expressions +// _tmp_129: yield_expr | star_expressions static void * -_tmp_128_rule(Parser *p) +_tmp_129_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16677,9 +16750,9 @@ _tmp_128_rule(Parser *p) return _res; } -// _tmp_129: '[' | '(' | '{' +// _tmp_130: '[' | '(' | '{' static void * -_tmp_129_rule(Parser *p) +_tmp_130_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16724,9 +16797,9 @@ _tmp_129_rule(Parser *p) return _res; } -// _loop0_130: param_no_default +// _loop0_131: param_no_default static asdl_seq * -_loop0_130_rule(Parser *p) +_loop0_131_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16763,19 +16836,19 @@ _loop0_130_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_130"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_131"); PyMem_Free(_children); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_130_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_131_type, _seq); return _seq; } -// _tmp_131: slash_with_default | param_with_default+ +// _tmp_132: slash_with_default | param_with_default+ static void * -_tmp_131_rule(Parser *p) +_tmp_132_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16794,12 +16867,12 @@ _tmp_131_rule(Parser *p) p->mark = _mark; } { // param_with_default+ - asdl_seq * _loop1_145_var; + asdl_seq * _loop1_146_var; if ( - (_loop1_145_var = _loop1_145_rule(p)) // 
param_with_default+ + (_loop1_146_var = _loop1_146_rule(p)) // param_with_default+ ) { - _res = _loop1_145_var; + _res = _loop1_146_var; goto done; } p->mark = _mark; @@ -16809,9 +16882,9 @@ _tmp_131_rule(Parser *p) return _res; } -// _tmp_132: ')' | ',' (')' | '**') +// _tmp_133: ')' | ',' (')' | '**') static void * -_tmp_132_rule(Parser *p) +_tmp_133_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16831,14 +16904,14 @@ _tmp_132_rule(Parser *p) } { // ',' (')' | '**') Token * _literal; - void *_tmp_146_var; + void *_tmp_147_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_146_var = _tmp_146_rule(p)) // ')' | '**' + (_tmp_147_var = _tmp_147_rule(p)) // ')' | '**' ) { - _res = _PyPegen_dummy_name(p, _literal, _tmp_146_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_147_var); goto done; } p->mark = _mark; @@ -16848,9 +16921,9 @@ _tmp_132_rule(Parser *p) return _res; } -// _tmp_133: ':' | ',' (':' | '**') +// _tmp_134: ':' | ',' (':' | '**') static void * -_tmp_133_rule(Parser *p) +_tmp_134_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16870,14 +16943,14 @@ _tmp_133_rule(Parser *p) } { // ',' (':' | '**') Token * _literal; - void *_tmp_147_var; + void *_tmp_148_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_147_var = _tmp_147_rule(p)) // ':' | '**' + (_tmp_148_var = _tmp_148_rule(p)) // ':' | '**' ) { - _res = _PyPegen_dummy_name(p, _literal, _tmp_147_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_148_var); goto done; } p->mark = _mark; @@ -16887,9 +16960,9 @@ _tmp_133_rule(Parser *p) return _res; } -// _tmp_134: star_targets '=' +// _tmp_135: star_targets '=' static void * -_tmp_134_rule(Parser *p) +_tmp_135_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16919,9 +16992,9 @@ _tmp_134_rule(Parser *p) return _res; } -// _tmp_135: '.' | '...' +// _tmp_136: '.' | '...' 
static void * -_tmp_135_rule(Parser *p) +_tmp_136_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16955,9 +17028,9 @@ _tmp_135_rule(Parser *p) return _res; } -// _tmp_136: '.' | '...' +// _tmp_137: '.' | '...' static void * -_tmp_136_rule(Parser *p) +_tmp_137_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -16991,9 +17064,9 @@ _tmp_136_rule(Parser *p) return _res; } -// _tmp_137: '@' named_expression NEWLINE +// _tmp_138: '@' named_expression NEWLINE static void * -_tmp_137_rule(Parser *p) +_tmp_138_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17026,9 +17099,9 @@ _tmp_137_rule(Parser *p) return _res; } -// _tmp_138: ',' star_expression +// _tmp_139: ',' star_expression static void * -_tmp_138_rule(Parser *p) +_tmp_139_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17058,9 +17131,9 @@ _tmp_138_rule(Parser *p) return _res; } -// _tmp_139: ',' expression +// _tmp_140: ',' expression static void * -_tmp_139_rule(Parser *p) +_tmp_140_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17090,9 +17163,9 @@ _tmp_139_rule(Parser *p) return _res; } -// _tmp_140: 'or' conjunction +// _tmp_141: 'or' conjunction static void * -_tmp_140_rule(Parser *p) +_tmp_141_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17122,9 +17195,9 @@ _tmp_140_rule(Parser *p) return _res; } -// _tmp_141: 'and' inversion +// _tmp_142: 'and' inversion static void * -_tmp_141_rule(Parser *p) +_tmp_142_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17154,9 +17227,9 @@ _tmp_141_rule(Parser *p) return _res; } -// _tmp_142: 'if' disjunction +// _tmp_143: 'if' disjunction static void * -_tmp_142_rule(Parser *p) +_tmp_143_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17186,9 +17259,9 @@ _tmp_142_rule(Parser *p) return _res; } -// _tmp_143: 'if' disjunction +// _tmp_144: 'if' disjunction static void * -_tmp_143_rule(Parser *p) +_tmp_144_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17218,9 
+17291,9 @@ _tmp_143_rule(Parser *p) return _res; } -// _tmp_144: ',' star_target +// _tmp_145: ',' star_target static void * -_tmp_144_rule(Parser *p) +_tmp_145_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17250,9 +17323,9 @@ _tmp_144_rule(Parser *p) return _res; } -// _loop1_145: param_with_default +// _loop1_146: param_with_default static asdl_seq * -_loop1_145_rule(Parser *p) +_loop1_146_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17293,19 +17366,19 @@ _loop1_145_rule(Parser *p) } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); if (!_seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_145"); + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_146"); PyMem_Free(_children); return NULL; } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_145_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_146_type, _seq); return _seq; } -// _tmp_146: ')' | '**' +// _tmp_147: ')' | '**' static void * -_tmp_146_rule(Parser *p) +_tmp_147_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -17339,9 +17412,9 @@ _tmp_146_rule(Parser *p) return _res; } -// _tmp_147: ':' | '**' +// _tmp_148: ':' | '**' static void * -_tmp_147_rule(Parser *p) +_tmp_148_rule(Parser *p) { if (p->error_indicator) { return NULL; diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 5f8c862c1f88be..083088bd9657bd 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -380,48 +380,57 @@ tokenizer_error(Parser *p) } void * -_PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const char *errmsg, ...) +_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) +{ + Token *t = p->known_err_token != NULL ? 
p->known_err_token : p->tokens[p->fill - 1]; + int col_offset; + if (t->col_offset == -1) { + col_offset = Py_SAFE_DOWNCAST(p->tok->cur - p->tok->buf, + intptr_t, int); + } else { + col_offset = t->col_offset + 1; + } + + va_list va; + va_start(va, errmsg); + _PyPegen_raise_error_known_location(p, errtype, t->lineno, + col_offset, errmsg, va); + va_end(va); + + return NULL; +} + + +void * +_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, + int lineno, int col_offset, + const char *errmsg, va_list va) { PyObject *value = NULL; PyObject *errstr = NULL; - PyObject *loc = NULL; + PyObject *error_line = NULL; PyObject *tmp = NULL; - Token *t = p->known_err_token != NULL ? p->known_err_token : p->tokens[p->fill - 1]; - Py_ssize_t col_number = !with_col_number; - va_list va; p->error_indicator = 1; - va_start(va, errmsg); errstr = PyUnicode_FromFormatV(errmsg, va); - va_end(va); if (!errstr) { goto error; } if (p->start_rule == Py_file_input) { - loc = PyErr_ProgramTextObject(p->tok->filename, t->lineno); + error_line = PyErr_ProgramTextObject(p->tok->filename, lineno); } - if (!loc) { - loc = get_error_line(p->tok->buf, p->start_rule == Py_file_input); - } - - if (loc && with_col_number) { - int col_offset; - if (t->col_offset == -1) { - col_offset = Py_SAFE_DOWNCAST(p->tok->cur - p->tok->buf, - intptr_t, int); - } else { - col_offset = t->col_offset + 1; + if (!error_line) { + error_line = get_error_line(p->tok->buf, p->start_rule == Py_file_input); + if (!error_line) { + goto error; } - col_number = byte_offset_to_character_offset(loc, col_offset); - } - else if (!loc) { - Py_INCREF(Py_None); - loc = Py_None; } - tmp = Py_BuildValue("(OiiN)", p->tok->filename, t->lineno, col_number, loc); + int col_number = byte_offset_to_character_offset(error_line, col_offset); + + tmp = Py_BuildValue("(OiiN)", p->tok->filename, lineno, col_number, error_line); if (!tmp) { goto error; } @@ -438,7 +447,7 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, int 
with_col_number, const ch error: Py_XDECREF(errstr); - Py_XDECREF(loc); + Py_XDECREF(error_line); return NULL; } diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index b55a652ac8060d..e5b1b757bd894b 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -127,15 +127,32 @@ expr_ty _PyPegen_name_token(Parser *p); expr_ty _PyPegen_number_token(Parser *p); void *_PyPegen_string_token(Parser *p); const char *_PyPegen_get_expr_name(expr_ty); -void *_PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const char *errmsg, ...); +void *_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...); +void *_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, + int lineno, int col_offset, + const char *errmsg, va_list va); void *_PyPegen_dummy_name(Parser *p, ...); +Py_LOCAL_INLINE(void *) +RAISE_ERROR_KNOWN_LOCATION(Parser *p, PyObject *errtype, int lineno, + int col_offset, const char *errmsg, ...) +{ + va_list va; + va_start(va, errmsg); + _PyPegen_raise_error_known_location(p, errtype, lineno, col_offset + 1, + errmsg, va); + va_end(va); + return NULL; +} + + #define UNUSED(expr) do { (void)(expr); } while (0) #define EXTRA_EXPR(head, tail) head->lineno, head->col_offset, tail->end_lineno, tail->end_col_offset, p->arena #define EXTRA _start_lineno, _start_col_offset, _end_lineno, _end_col_offset, p->arena -#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 1, msg, ##__VA_ARGS__) -#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, 1, msg, ##__VA_ARGS__) -#define RAISE_SYNTAX_ERROR_NO_COL_OFFSET(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 0, msg, ##__VA_ARGS__) +#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, msg, ##__VA_ARGS__) +#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, msg, ##__VA_ARGS__) +#define RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, msg, ...) 
\ + RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, a->lineno, a->col_offset, msg, ##__VA_ARGS__) Py_LOCAL_INLINE(void *) CHECK_CALL(Parser *p, void *result) From 97e1568325e4d8eff2fc80eeb174b3f3e5d1c350 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 14 May 2020 00:31:31 +0200 Subject: [PATCH 088/115] bpo-38787: Fix Argument Clinic defining_class_converter (GH-20074) Don't hardcode defining_class parameter name to "cls": * Define CConverter.set_template_dict(): do nothing by default * CLanguage.render_function() now calls set_template_dict() on all converters. --- Tools/clinic/clinic.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index 281a749a935cc4..b07ffdd928f154 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -724,7 +724,7 @@ def output_templates(self, f): parser_prototype_def_class = normalize_snippet(""" static PyObject * - {c_basename}({self_type}{self_name}, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) + {c_basename}({self_type}{self_name}, PyTypeObject *{defining_class_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) """) # parser_body_fields remembers the fields passed in to the @@ -1305,7 +1305,8 @@ def render_function(self, clinic, f): template_dict['docstring'] = self.docstring_for_c_string(f) template_dict['self_name'] = template_dict['self_type'] = template_dict['self_type_check'] = '' - f_self.converter.set_template_dict(template_dict) + for converter in converters: + converter.set_template_dict(template_dict) f.return_converter.render(f, data) template_dict['impl_return_type'] = f.return_converter.type @@ -2698,6 +2699,10 @@ def parse_arg(self, argname, displayname): """.format(argname=argname, paramname=self.name, cast=cast) return None + def set_template_dict(self, template_dict): + pass + + type_checks = { '&PyLong_Type': ('PyLong_Check', 'int'), '&PyTuple_Type': ('PyTuple_Check', 
'tuple'), From d72ea605218bbee6ae46648997d9bb76d0fba460 Mon Sep 17 00:00:00 2001 From: Michael Graczyk Date: Wed, 13 May 2020 17:41:57 -0500 Subject: [PATCH 089/115] issue-25872: Fix KeyError using linecache from multiple threads (GH-18007) The crash that this fixes occurs when using traceback and other modules from multiple threads; del cache[filename] can raise a KeyError. --- Lib/linecache.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/linecache.py b/Lib/linecache.py index ddd0abf2cf01d9..fa5dbd09eab869 100644 --- a/Lib/linecache.py +++ b/Lib/linecache.py @@ -71,10 +71,10 @@ def checkcache(filename=None): try: stat = os.stat(fullname) except OSError: - del cache[filename] + cache.pop(filename, None) continue if size != stat.st_size or mtime != stat.st_mtime: - del cache[filename] + cache.pop(filename, None) def updatecache(filename, module_globals=None): @@ -84,7 +84,7 @@ def updatecache(filename, module_globals=None): if filename in cache: if len(cache[filename]) != 1: - del cache[filename] + cache.pop(filename, None) if not filename or (filename.startswith('<') and filename.endswith('>')): return [] From d6fb53fe42d83a10f1372dd92ffaa6a01d2feffb Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 14 May 2020 01:11:54 +0200 Subject: [PATCH 090/115] bpo-39465: Remove _PyUnicode_ClearStaticStrings() from C API (GH-20078) Remove the _PyUnicode_ClearStaticStrings() function from the C API. Make the function fully private (declare it with "static"). 
--- Doc/whatsnew/3.9.rst | 3 +++ Include/cpython/object.h | 2 +- Include/cpython/unicodeobject.h | 2 -- .../next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst | 1 + Objects/unicodeobject.c | 6 +++--- 5 files changed, 8 insertions(+), 6 deletions(-) create mode 100644 Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index c57d702dce8675..2fec790fe3a639 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -964,3 +964,6 @@ Removed * ``PyTuple_ClearFreeList()`` * ``PyUnicode_ClearFreeList()``: the Unicode free list has been removed in Python 3.3. + +* Remove ``_PyUnicode_ClearStaticStrings()`` function. + (Contributed by Victor Stinner in :issue:`39465`.) diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 8bf05a32711835..444f832f5bd8d3 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -36,7 +36,7 @@ PyAPI_FUNC(Py_ssize_t) _Py_GetRefTotal(void); PyId_foo is a static variable, either on block level or file level. On first usage, the string "foo" is interned, and the structures are linked. On interpreter - shutdown, all strings are released (through _PyUnicode_ClearStaticStrings). + shutdown, all strings are released. Alternatively, _Py_static_string allows choosing the variable name. _PyUnicode_FromId returns a borrowed reference to the interned string. diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 94326876292b63..4fd674ffea36ea 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -1215,8 +1215,6 @@ Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) PyUnicode_AsUnicodeCopy( /* Return an interned Unicode object for an Identifier; may fail if there is no memory.*/ PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*); -/* Clear all static strings. 
*/ -PyAPI_FUNC(void) _PyUnicode_ClearStaticStrings(void); /* Fast equality check when the inputs are known to be exact unicode types and where the hash values are equal (i.e. a very probable match) */ diff --git a/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst b/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst new file mode 100644 index 00000000000000..a08c3da5660455 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst @@ -0,0 +1 @@ +Remove the ``_PyUnicode_ClearStaticStrings()`` function from the C API. diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 826298c23a924c..34b747ec7bb7ee 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -2289,8 +2289,8 @@ _PyUnicode_FromId(_Py_Identifier *id) return id->object; } -void -_PyUnicode_ClearStaticStrings() +static void +unicode_clear_static_strings(void) { _Py_Identifier *tmp, *s = static_strings; while (s) { @@ -16196,7 +16196,7 @@ _PyUnicode_Fini(PyThreadState *tstate) Py_CLEAR(unicode_latin1[i]); } #endif - _PyUnicode_ClearStaticStrings(); + unicode_clear_static_strings(); } _PyUnicode_FiniEncodings(tstate); From 75cd8e48c62c97fdb9d9a94fd2335be06084471d Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Wed, 13 May 2020 16:18:27 -0700 Subject: [PATCH 091/115] bpo-29587: Make gen.throw() chain exceptions with yield from (GH-19858) The previous commits on bpo-29587 got exception chaining working with gen.throw() in the `yield` case. This patch also gets the `yield from` case working. As a consequence, implicit exception chaining now also works in the asyncio scenario of awaiting on a task when an exception is already active. Tests are included for both the asyncio case and the pure generator-only case. 
--- Lib/test/test_asyncio/test_tasks.py | 27 +++++++++++++++++++++++++++ Lib/test/test_generators.py | 19 ++++++++++++++++++- Objects/genobject.c | 22 ++++++++++++---------- 3 files changed, 57 insertions(+), 11 deletions(-) diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 68f3b8cce9f65d..6eb6b46ec8af75 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -466,6 +466,33 @@ async def inner2(): t = outer() self.assertEqual(self.loop.run_until_complete(t), 1042) + def test_exception_chaining_after_await(self): + # Test that when awaiting on a task when an exception is already + # active, if the task raises an exception it will be chained + # with the original. + loop = asyncio.new_event_loop() + self.set_event_loop(loop) + + async def raise_error(): + raise ValueError + + async def run(): + try: + raise KeyError(3) + except Exception as exc: + task = self.new_task(loop, raise_error()) + try: + await task + except Exception as exc: + self.assertEqual(type(exc), ValueError) + chained = exc.__context__ + self.assertEqual((type(chained), chained.args), + (KeyError, (3,))) + + task = self.new_task(loop, run()) + loop.run_until_complete(task) + loop.close() + def test_cancel(self): def gen(): diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index e0478011996807..1081107ee64ace 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -318,7 +318,7 @@ def g(): class GeneratorThrowTest(unittest.TestCase): - def test_exception_context_set(self): + def test_exception_context_with_yield(self): def f(): try: raise KeyError('a') @@ -332,6 +332,23 @@ def f(): context = cm.exception.__context__ self.assertEqual((type(context), context.args), (KeyError, ('a',))) + def test_exception_context_with_yield_from(self): + def f(): + yield + + def g(): + try: + raise KeyError('a') + except Exception: + yield from f() + + gen = g() + gen.send(None) + with 
self.assertRaises(ValueError) as cm: + gen.throw(ValueError) + context = cm.exception.__context__ + self.assertEqual((type(context), context.args), (KeyError, ('a',))) + def test_throw_after_none_exc_type(self): def g(): try: diff --git a/Objects/genobject.c b/Objects/genobject.c index 5b253edfdcd0f6..fb01e581f8ae15 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -217,6 +217,18 @@ gen_send_ex(PyGenObject *gen, PyObject *arg, int exc, int closing) assert(f->f_back == NULL); f->f_back = tstate->frame; + _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state; + if (exc && gi_exc_state->exc_type != NULL && + gi_exc_state->exc_type != Py_None) + { + Py_INCREF(gi_exc_state->exc_type); + Py_XINCREF(gi_exc_state->exc_value); + Py_XINCREF(gi_exc_state->exc_traceback); + _PyErr_ChainExceptions(gi_exc_state->exc_type, + gi_exc_state->exc_value, + gi_exc_state->exc_traceback); + } + gen->gi_running = 1; gen->gi_exc_state.previous_item = tstate->exc_info; tstate->exc_info = &gen->gi_exc_state; @@ -512,16 +524,6 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, } PyErr_Restore(typ, val, tb); - - _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state; - if (gi_exc_state->exc_type != NULL && gi_exc_state->exc_type != Py_None) { - Py_INCREF(gi_exc_state->exc_type); - Py_XINCREF(gi_exc_state->exc_value); - Py_XINCREF(gi_exc_state->exc_traceback); - _PyErr_ChainExceptions(gi_exc_state->exc_type, - gi_exc_state->exc_value, - gi_exc_state->exc_traceback); - } return gen_send_ex(gen, Py_None, 1, 0); failed_throw: From 3d17c045b4c3d09b72bbd95ed78af1ae6f0d98d2 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 14 May 2020 01:48:38 +0200 Subject: [PATCH 092/115] bpo-40521: Add PyInterpreterState.unicode (GH-20081) Move PyInterpreterState.fs_codec into a new PyInterpreterState.unicode structure. Give a name to the fs_codec structure and use this structure in unicodeobject.c. 
--- Include/internal/pycore_interp.h | 22 +++++++---- Modules/_io/textio.c | 2 +- Objects/unicodeobject.c | 64 ++++++++++++++++---------------- 3 files changed, 48 insertions(+), 40 deletions(-) diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 26e7a473a12dc6..f04ea330d04571 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -51,6 +51,19 @@ struct _ceval_state { #endif }; +/* fs_codec.encoding is initialized to NULL. + Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */ +struct _Py_unicode_fs_codec { + char *encoding; // Filesystem encoding (encoded to UTF-8) + int utf8; // encoding=="utf-8"? + char *errors; // Filesystem errors (encoded to UTF-8) + _Py_error_handler error_handler; +}; + +struct _Py_unicode_state { + struct _Py_unicode_fs_codec fs_codec; +}; + /* interpreter state */ @@ -97,14 +110,7 @@ struct _is { PyObject *codec_error_registry; int codecs_initialized; - /* fs_codec.encoding is initialized to NULL. - Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */ - struct { - char *encoding; /* Filesystem encoding (encoded to UTF-8) */ - int utf8; /* encoding=="utf-8"? */ - char *errors; /* Filesystem errors (encoded to UTF-8) */ - _Py_error_handler error_handler; - } fs_codec; + struct _Py_unicode_state unicode; PyConfig config; #ifdef HAVE_DLOPEN diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c index 1abc9ca6f206aa..f2c72ebd516589 100644 --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -1007,7 +1007,7 @@ io_check_errors(PyObject *errors) /* Avoid calling PyCodec_LookupError() before the codec registry is ready: before_PyUnicode_InitEncodings() is called. 
*/ - if (!interp->fs_codec.encoding) { + if (!interp->unicode.fs_codec.encoding) { return 0; } diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 34b747ec7bb7ee..ea46a44bf5faac 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -463,7 +463,7 @@ unicode_check_encoding_errors(const char *encoding, const char *errors) /* Avoid calling _PyCodec_Lookup() and PyCodec_LookupError() before the codec registry is ready: before_PyUnicode_InitEncodings() is called. */ - if (!interp->fs_codec.encoding) { + if (!interp->unicode.fs_codec.encoding) { return 0; } @@ -3650,16 +3650,17 @@ PyObject * PyUnicode_EncodeFSDefault(PyObject *unicode) { PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->fs_codec.utf8) { + struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec; + if (fs_codec->utf8) { return unicode_encode_utf8(unicode, - interp->fs_codec.error_handler, - interp->fs_codec.errors); + fs_codec->error_handler, + fs_codec->errors); } #ifndef _Py_FORCE_UTF8_FS_ENCODING - else if (interp->fs_codec.encoding) { + else if (fs_codec->encoding) { return PyUnicode_AsEncodedString(unicode, - interp->fs_codec.encoding, - interp->fs_codec.errors); + fs_codec->encoding, + fs_codec->errors); } #endif else { @@ -3886,17 +3887,18 @@ PyObject* PyUnicode_DecodeFSDefaultAndSize(const char *s, Py_ssize_t size) { PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->fs_codec.utf8) { + struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec; + if (fs_codec->utf8) { return unicode_decode_utf8(s, size, - interp->fs_codec.error_handler, - interp->fs_codec.errors, + fs_codec->error_handler, + fs_codec->errors, NULL); } #ifndef _Py_FORCE_UTF8_FS_ENCODING - else if (interp->fs_codec.encoding) { + else if (fs_codec->encoding) { return PyUnicode_Decode(s, size, - interp->fs_codec.encoding, - interp->fs_codec.errors); + fs_codec->encoding, + fs_codec->errors); } #endif else { @@ -16071,16 +16073,17 @@ 
init_fs_codec(PyInterpreterState *interp) return -1; } - PyMem_RawFree(interp->fs_codec.encoding); - interp->fs_codec.encoding = encoding; + struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec; + PyMem_RawFree(fs_codec->encoding); + fs_codec->encoding = encoding; /* encoding has been normalized by init_fs_encoding() */ - interp->fs_codec.utf8 = (strcmp(encoding, "utf-8") == 0); - PyMem_RawFree(interp->fs_codec.errors); - interp->fs_codec.errors = errors; - interp->fs_codec.error_handler = error_handler; + fs_codec->utf8 = (strcmp(encoding, "utf-8") == 0); + PyMem_RawFree(fs_codec->errors); + fs_codec->errors = errors; + fs_codec->error_handler = error_handler; #ifdef _Py_FORCE_UTF8_FS_ENCODING - assert(interp->fs_codec.utf8 == 1); + assert(fs_codec->utf8 == 1); #endif /* At this point, PyUnicode_EncodeFSDefault() and @@ -16089,8 +16092,8 @@ init_fs_codec(PyInterpreterState *interp) /* Set Py_FileSystemDefaultEncoding and Py_FileSystemDefaultEncodeErrors global configuration variables. 
*/ - if (_Py_SetFileSystemEncoding(interp->fs_codec.encoding, - interp->fs_codec.errors) < 0) { + if (_Py_SetFileSystemEncoding(fs_codec->encoding, + fs_codec->errors) < 0) { PyErr_NoMemory(); return -1; } @@ -16133,15 +16136,14 @@ _PyUnicode_InitEncodings(PyThreadState *tstate) static void -_PyUnicode_FiniEncodings(PyThreadState *tstate) +_PyUnicode_FiniEncodings(struct _Py_unicode_fs_codec *fs_codec) { - PyInterpreterState *interp = tstate->interp; - PyMem_RawFree(interp->fs_codec.encoding); - interp->fs_codec.encoding = NULL; - interp->fs_codec.utf8 = 0; - PyMem_RawFree(interp->fs_codec.errors); - interp->fs_codec.errors = NULL; - interp->fs_codec.error_handler = _Py_ERROR_UNKNOWN; + PyMem_RawFree(fs_codec->encoding); + fs_codec->encoding = NULL; + fs_codec->utf8 = 0; + PyMem_RawFree(fs_codec->errors); + fs_codec->errors = NULL; + fs_codec->error_handler = _Py_ERROR_UNKNOWN; } @@ -16199,7 +16201,7 @@ _PyUnicode_Fini(PyThreadState *tstate) unicode_clear_static_strings(); } - _PyUnicode_FiniEncodings(tstate); + _PyUnicode_FiniEncodings(&tstate->interp->unicode.fs_codec); } From 6f2f475d5a2cd7675dce844f3af436ba919ef92b Mon Sep 17 00:00:00 2001 From: Arkadiusz Hiler Date: Thu, 14 May 2020 03:53:26 +0300 Subject: [PATCH 093/115] bpo-40597: email: Use CTE if lines are longer than max_line_length consistently (gh-20038) raw_data_manager (default for EmailPolicy, EmailMessage) does correct wrapping of 'text' parts as long as the message contains characters outside of 7bit US-ASCII set: base64 or qp Content-Transfer-Encoding is applied if the lines would be too long without it. It did not, however, do this for ascii-only text, which could result in lines that were longer than policy.max_line_length or even the rfc 998 maximum. This changeset fixes the heuristic so that if lines are longer than policy.max_line_length, it will always apply a content-transfer-encoding so that the lines are wrapped correctly. 
--- Lib/email/contentmanager.py | 14 +++++++------- Lib/test/test_email/test_contentmanager.py | 15 +++++++++++++++ .../2020-05-11-19-17-23.bpo-40597.4SGfgm.rst | 1 + 3 files changed, 23 insertions(+), 7 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst diff --git a/Lib/email/contentmanager.py b/Lib/email/contentmanager.py index b904ded94c92ef..2b4b8757f46f62 100644 --- a/Lib/email/contentmanager.py +++ b/Lib/email/contentmanager.py @@ -146,13 +146,13 @@ def embedded_body(lines): return linesep.join(lines) + linesep def normal_body(lines): return b'\n'.join(lines) + b'\n' if cte==None: # Use heuristics to decide on the "best" encoding. - try: - return '7bit', normal_body(lines).decode('ascii') - except UnicodeDecodeError: - pass - if (policy.cte_type == '8bit' and - max(len(x) for x in lines) <= policy.max_line_length): - return '8bit', normal_body(lines).decode('ascii', 'surrogateescape') + if max(len(x) for x in lines) <= policy.max_line_length: + try: + return '7bit', normal_body(lines).decode('ascii') + except UnicodeDecodeError: + pass + if policy.cte_type == '8bit': + return '8bit', normal_body(lines).decode('ascii', 'surrogateescape') sniff = embedded_body(lines[:10]) sniff_qp = quoprimime.body_encode(sniff.decode('latin-1'), policy.max_line_length) diff --git a/Lib/test/test_email/test_contentmanager.py b/Lib/test/test_email/test_contentmanager.py index 169058eac83da3..64dca2d017e629 100644 --- a/Lib/test/test_email/test_contentmanager.py +++ b/Lib/test/test_email/test_contentmanager.py @@ -329,6 +329,21 @@ def test_set_text_charset_latin_1(self): self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content) self.assertEqual(m.get_content(), content) + def test_set_text_plain_long_line_heuristics(self): + m = self._make_message() + content = ("Simple but long message that is over 78 characters" + " long to force transfer encoding.\n") + raw_data_manager.set_content(m, content) + 
self.assertEqual(str(m), textwrap.dedent("""\ + Content-Type: text/plain; charset="utf-8" + Content-Transfer-Encoding: quoted-printable + + Simple but long message that is over 78 characters long to = + force transfer encoding. + """)) + self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content) + self.assertEqual(m.get_content(), content) + def test_set_text_short_line_minimal_non_ascii_heuristics(self): m = self._make_message() content = "et là il est monté sur moi et il commence à m'éto.\n" diff --git a/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst new file mode 100644 index 00000000000000..1b9fe609c25b71 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst @@ -0,0 +1 @@ +If text content lines are longer than policy.max_line_length, always use a content-encoding to make sure they are wrapped. From 7443d42021d433da0497f8ba651daa47e7dc1991 Mon Sep 17 00:00:00 2001 From: Hai Shi Date: Thu, 14 May 2020 09:22:30 +0800 Subject: [PATCH 094/115] bpo-40275: Import locale module lazily in gettext (GH-19905) --- Lib/gettext.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/Lib/gettext.py b/Lib/gettext.py index b98f501884b75a..77b67aef4204c9 100644 --- a/Lib/gettext.py +++ b/Lib/gettext.py @@ -46,7 +46,6 @@ # find this format documented anywhere. 
-import locale import os import re import sys @@ -210,6 +209,7 @@ def func(n): def _expand_lang(loc): + import locale loc = locale.normalize(loc) COMPONENT_CODESET = 1 << 0 COMPONENT_TERRITORY = 1 << 1 @@ -278,6 +278,7 @@ def lgettext(self, message): import warnings warnings.warn('lgettext() is deprecated, use gettext() instead', DeprecationWarning, 2) + import locale if self._fallback: with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'.*\blgettext\b.*', @@ -299,6 +300,7 @@ def lngettext(self, msgid1, msgid2, n): import warnings warnings.warn('lngettext() is deprecated, use ngettext() instead', DeprecationWarning, 2) + import locale if self._fallback: with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'.*\blngettext\b.*', @@ -462,6 +464,7 @@ def lgettext(self, message): import warnings warnings.warn('lgettext() is deprecated, use gettext() instead', DeprecationWarning, 2) + import locale missing = object() tmsg = self._catalog.get(message, missing) if tmsg is missing: @@ -476,6 +479,7 @@ def lngettext(self, msgid1, msgid2, n): import warnings warnings.warn('lngettext() is deprecated, use ngettext() instead', DeprecationWarning, 2) + import locale try: tmsg = self._catalog[(msgid1, self.plural(n))] except KeyError: @@ -668,6 +672,7 @@ def ldgettext(domain, message): import warnings warnings.warn('ldgettext() is deprecated, use dgettext() instead', DeprecationWarning, 2) + import locale codeset = _localecodesets.get(domain) try: with warnings.catch_warnings(): @@ -695,6 +700,7 @@ def ldngettext(domain, msgid1, msgid2, n): import warnings warnings.warn('ldngettext() is deprecated, use dngettext() instead', DeprecationWarning, 2) + import locale codeset = _localecodesets.get(domain) try: with warnings.catch_warnings(): From e77d428856fbd339faee44ff47214eda5fb51d57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lum=C3=ADr=20=27Frenzy=27=20Balhar?= Date: Thu, 14 May 2020 16:17:22 +0200 Subject: [PATCH 095/115] bpo-40495: compileall option to 
hardlink duplicate pyc files (GH-19901) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit compileall is now able to use hardlinks to prevent duplicates in a case when .pyc files for different optimization levels have the same content. Co-authored-by: Miro Hrončok Co-authored-by: Victor Stinner --- Doc/library/compileall.rst | 21 +- Doc/whatsnew/3.9.rst | 10 + Lib/compileall.py | 42 +++- Lib/test/test_compileall.py | 224 +++++++++++++++++- Misc/ACKS | 1 + .../2020-05-04-11-20-49.bpo-40495.TyTc2O.rst | 2 + 6 files changed, 285 insertions(+), 15 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst index b1ae9d60e8ae14..a511c7eda265b2 100644 --- a/Doc/library/compileall.rst +++ b/Doc/library/compileall.rst @@ -113,6 +113,11 @@ compile Python sources. Ignore symlinks pointing outside the given directory. +.. cmdoption:: --hardlink-dupes + + If two ``.pyc`` files with different optimization level have + the same content, use hard links to consolidate duplicate files. + .. versionchanged:: 3.2 Added the ``-i``, ``-b`` and ``-h`` options. @@ -125,7 +130,7 @@ compile Python sources. Added the ``--invalidation-mode`` option. .. versionchanged:: 3.9 - Added the ``-s``, ``-p``, ``-e`` options. + Added the ``-s``, ``-p``, ``-e`` and ``--hardlink-dupes`` options. Raised the default recursion limit from 10 to :py:func:`sys.getrecursionlimit()`. Added the possibility to specify the ``-o`` option multiple times. @@ -143,7 +148,7 @@ runtime. Public functions ---------------- -.. function:: compile_dir(dir, maxlevels=sys.getrecursionlimit(), ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None) +.. 
function:: compile_dir(dir, maxlevels=sys.getrecursionlimit(), ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None, hardlink_dupes=False) Recursively descend the directory tree named by *dir*, compiling all :file:`.py` files along the way. Return a true value if all the files compiled successfully, @@ -193,6 +198,9 @@ Public functions the ``-s``, ``-p`` and ``-e`` options described above. They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`. + If *hardlink_dupes* is true and two ``.pyc`` files with different optimization + level have the same content, use hard links to consolidate duplicate files. + .. versionchanged:: 3.2 Added the *legacy* and *optimize* parameter. @@ -219,9 +227,9 @@ Public functions Setting *workers* to 0 now chooses the optimal number of cores. .. versionchanged:: 3.9 - Added *stripdir*, *prependdir* and *limit_sl_dest* arguments. + Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments. -.. function:: compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None) +.. function:: compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None, hardlink_dupes=False) Compile the file with path *fullname*. Return a true value if the file compiled successfully, and a false value otherwise. @@ -257,6 +265,9 @@ Public functions the ``-s``, ``-p`` and ``-e`` options described above. They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`. + If *hardlink_dupes* is true and two ``.pyc`` files with different optimization + level have the same content, use hard links to consolidate duplicate files. + .. versionadded:: 3.2 .. 
versionchanged:: 3.5 @@ -273,7 +284,7 @@ Public functions The *invalidation_mode* parameter's default value is updated to None. .. versionchanged:: 3.9 - Added *stripdir*, *prependdir* and *limit_sl_dest* arguments. + Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments. .. function:: compile_path(skip_curdir=True, maxlevels=0, force=False, quiet=0, legacy=False, optimize=-1, invalidation_mode=None) diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 2fec790fe3a639..fbad0fba20f4b7 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -245,6 +245,16 @@ that schedules a shutdown for the default executor that waits on the Added :class:`asyncio.PidfdChildWatcher`, a Linux-specific child watcher implementation that polls process file descriptors. (:issue:`38692`) +compileall +---------- + +Added new possibility to use hardlinks for duplicated ``.pyc`` files: *hardlink_dupes* parameter and --hardlink-dupes command line option. +(Contributed by Lumír 'Frenzy' Balhar in :issue:`40495`.) + +Added new options for path manipulation in resulting ``.pyc`` files: *stripdir*, *prependdir*, *limit_sl_dest* parameters and -s, -p, -e command line options. +Added the possibility to specify the option for an optimization level multiple times. +(Contributed by Lumír 'Frenzy' Balhar in :issue:`38112`.) 
+ concurrent.futures ------------------ diff --git a/Lib/compileall.py b/Lib/compileall.py index abe6cffce59c5f..fe7f450c55e1c5 100644 --- a/Lib/compileall.py +++ b/Lib/compileall.py @@ -15,6 +15,7 @@ import importlib.util import py_compile import struct +import filecmp from functools import partial from pathlib import Path @@ -47,7 +48,7 @@ def _walk_dir(dir, maxlevels, quiet=0): def compile_dir(dir, maxlevels=None, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, *, stripdir=None, - prependdir=None, limit_sl_dest=None): + prependdir=None, limit_sl_dest=None, hardlink_dupes=False): """Byte-compile all modules in the given directory tree. Arguments (only dir is required): @@ -70,6 +71,7 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False, after stripdir limit_sl_dest: ignore symlinks if they are pointing outside of the defined path + hardlink_dupes: hardlink duplicated pyc files """ ProcessPoolExecutor = None if ddir is not None and (stripdir is not None or prependdir is not None): @@ -104,7 +106,8 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False, invalidation_mode=invalidation_mode, stripdir=stripdir, prependdir=prependdir, - limit_sl_dest=limit_sl_dest), + limit_sl_dest=limit_sl_dest, + hardlink_dupes=hardlink_dupes), files) success = min(results, default=True) else: @@ -112,14 +115,15 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False, if not compile_file(file, ddir, force, rx, quiet, legacy, optimize, invalidation_mode, stripdir=stripdir, prependdir=prependdir, - limit_sl_dest=limit_sl_dest): + limit_sl_dest=limit_sl_dest, + hardlink_dupes=hardlink_dupes): success = False return success def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, *, stripdir=None, prependdir=None, - limit_sl_dest=None): + limit_sl_dest=None, hardlink_dupes=False): """Byte-compile one file. 
Arguments (only fullname is required): @@ -140,6 +144,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, after stripdir limit_sl_dest: ignore symlinks if they are pointing outside of the defined path. + hardlink_dupes: hardlink duplicated pyc files """ if ddir is not None and (stripdir is not None or prependdir is not None): @@ -176,6 +181,14 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, if isinstance(optimize, int): optimize = [optimize] + # Use set() to remove duplicates. + # Use sorted() to create pyc files in a deterministic order. + optimize = sorted(set(optimize)) + + if hardlink_dupes and len(optimize) < 2: + raise ValueError("Hardlinking of duplicated bytecode makes sense " + "only for more than one optimization level") + if rx is not None: mo = rx.search(fullname) if mo: @@ -220,10 +233,16 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, if not quiet: print('Compiling {!r}...'.format(fullname)) try: - for opt_level, cfile in opt_cfiles.items(): + for index, opt_level in enumerate(optimize): + cfile = opt_cfiles[opt_level] ok = py_compile.compile(fullname, cfile, dfile, True, optimize=opt_level, invalidation_mode=invalidation_mode) + if index > 0 and hardlink_dupes: + previous_cfile = opt_cfiles[optimize[index - 1]] + if filecmp.cmp(cfile, previous_cfile, shallow=False): + os.unlink(cfile) + os.link(previous_cfile, cfile) except py_compile.PyCompileError as err: success = False if quiet >= 2: @@ -352,6 +371,9 @@ def main(): 'Python interpreter itself (specified by -O).')) parser.add_argument('-e', metavar='DIR', dest='limit_sl_dest', help='Ignore symlinks pointing outsite of the DIR') + parser.add_argument('--hardlink-dupes', action='store_true', + dest='hardlink_dupes', + help='Hardlink duplicated pyc files') args = parser.parse_args() compile_dests = args.compile_dest @@ -371,6 +393,10 @@ def main(): if args.opt_levels is None: args.opt_levels = [-1] + if len(args.opt_levels) == 1 and 
args.hardlink_dupes: + parser.error(("Hardlinking of duplicated bytecode makes sense " + "only for more than one optimization level.")) + if args.ddir is not None and ( args.stripdir is not None or args.prependdir is not None ): @@ -404,7 +430,8 @@ def main(): stripdir=args.stripdir, prependdir=args.prependdir, optimize=args.opt_levels, - limit_sl_dest=args.limit_sl_dest): + limit_sl_dest=args.limit_sl_dest, + hardlink_dupes=args.hardlink_dupes): success = False else: if not compile_dir(dest, maxlevels, args.ddir, @@ -414,7 +441,8 @@ def main(): stripdir=args.stripdir, prependdir=args.prependdir, optimize=args.opt_levels, - limit_sl_dest=args.limit_sl_dest): + limit_sl_dest=args.limit_sl_dest, + hardlink_dupes=args.hardlink_dupes): success = False return success else: diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py index 72678945089f28..b4061b79357b87 100644 --- a/Lib/test/test_compileall.py +++ b/Lib/test/test_compileall.py @@ -1,16 +1,19 @@ -import sys import compileall +import contextlib +import filecmp import importlib.util -import test.test_importlib.util +import io +import itertools import os import pathlib import py_compile import shutil import struct +import sys import tempfile +import test.test_importlib.util import time import unittest -import io from unittest import mock, skipUnless try: @@ -26,6 +29,24 @@ from .test_py_compile import SourceDateEpochTestMeta +def get_pyc(script, opt): + if not opt: + # Replace None and 0 with '' + opt = '' + return importlib.util.cache_from_source(script, optimization=opt) + + +def get_pycs(script): + return [get_pyc(script, opt) for opt in (0, 1, 2)] + + +def is_hardlink(filename1, filename2): + """Returns True if two files have the same inode (hardlink)""" + inode1 = os.stat(filename1).st_ino + inode2 = os.stat(filename2).st_ino + return inode1 == inode2 + + class CompileallTestsBase: def setUp(self): @@ -825,6 +846,32 @@ def test_ignore_symlink_destination(self): 
self.assertTrue(os.path.isfile(allowed_bc)) self.assertFalse(os.path.isfile(prohibited_bc)) + def test_hardlink_bad_args(self): + # Bad arguments combination, hardlink deduplication make sense + # only for more than one optimization level + self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes") + + def test_hardlink(self): + # 'a = 0' code produces the same bytecode for the 3 optimization + # levels. All three .pyc files must have the same inode (hardlinks). + # + # If deduplication is disabled, all pyc files must have different + # inodes. + for dedup in (True, False): + with tempfile.TemporaryDirectory() as path: + with self.subTest(dedup=dedup): + script = script_helper.make_script(path, "script", "a = 0") + pycs = get_pycs(script) + + args = ["-q", "-o 0", "-o 1", "-o 2"] + if dedup: + args.append("--hardlink-dupes") + self.assertRunOK(path, *args) + + self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup) + self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup) + self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup) + class CommandLineTestsWithSourceEpoch(CommandLineTestsBase, unittest.TestCase, @@ -841,5 +888,176 @@ class CommandLineTestsNoSourceEpoch(CommandLineTestsBase, +class HardlinkDedupTestsBase: + # Test hardlink_dupes parameter of compileall.compile_dir() + + def setUp(self): + self.path = None + + @contextlib.contextmanager + def temporary_directory(self): + with tempfile.TemporaryDirectory() as path: + self.path = path + yield path + self.path = None + + def make_script(self, code, name="script"): + return script_helper.make_script(self.path, name, code) + + def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False): + compileall.compile_dir(self.path, quiet=True, optimize=optimize, + hardlink_dupes=dedup, force=force) + + def test_bad_args(self): + # Bad arguments combination, hardlink deduplication make sense + # only for more than one optimization level + with self.temporary_directory(): + self.make_script("pass") + with 
self.assertRaises(ValueError): + compileall.compile_dir(self.path, quiet=True, optimize=0, + hardlink_dupes=True) + with self.assertRaises(ValueError): + # same optimization level specified twice: + # compile_dir() removes duplicates + compileall.compile_dir(self.path, quiet=True, optimize=[0, 0], + hardlink_dupes=True) + + def create_code(self, docstring=False, assertion=False): + lines = [] + if docstring: + lines.append("'module docstring'") + lines.append('x = 1') + if assertion: + lines.append("assert x == 1") + return '\n'.join(lines) + + def iter_codes(self): + for docstring in (False, True): + for assertion in (False, True): + code = self.create_code(docstring=docstring, assertion=assertion) + yield (code, docstring, assertion) + + def test_disabled(self): + # Deduplication disabled, no hardlinks + for code, docstring, assertion in self.iter_codes(): + with self.subTest(docstring=docstring, assertion=assertion): + with self.temporary_directory(): + script = self.make_script(code) + pycs = get_pycs(script) + self.compile_dir(dedup=False) + self.assertFalse(is_hardlink(pycs[0], pycs[1])) + self.assertFalse(is_hardlink(pycs[0], pycs[2])) + self.assertFalse(is_hardlink(pycs[1], pycs[2])) + + def check_hardlinks(self, script, docstring=False, assertion=False): + pycs = get_pycs(script) + self.assertEqual(is_hardlink(pycs[0], pycs[1]), + not assertion) + self.assertEqual(is_hardlink(pycs[0], pycs[2]), + not assertion and not docstring) + self.assertEqual(is_hardlink(pycs[1], pycs[2]), + not docstring) + + def test_hardlink(self): + # Test deduplication on all combinations + for code, docstring, assertion in self.iter_codes(): + with self.subTest(docstring=docstring, assertion=assertion): + with self.temporary_directory(): + script = self.make_script(code) + self.compile_dir() + self.check_hardlinks(script, docstring, assertion) + + def test_only_two_levels(self): + # Don't build the 3 optimization levels, but only 2 + for opts in ((0, 1), (1, 2), (0, 2)): + with 
self.subTest(opts=opts): + with self.temporary_directory(): + # code with no dostring and no assertion: + # same bytecode for all optimization levels + script = self.make_script(self.create_code()) + self.compile_dir(optimize=opts) + pyc1 = get_pyc(script, opts[0]) + pyc2 = get_pyc(script, opts[1]) + self.assertTrue(is_hardlink(pyc1, pyc2)) + + def test_duplicated_levels(self): + # compile_dir() must not fail if optimize contains duplicated + # optimization levels and/or if optimization levels are not sorted. + with self.temporary_directory(): + # code with no dostring and no assertion: + # same bytecode for all optimization levels + script = self.make_script(self.create_code()) + self.compile_dir(optimize=[1, 0, 1, 0]) + pyc1 = get_pyc(script, 0) + pyc2 = get_pyc(script, 1) + self.assertTrue(is_hardlink(pyc1, pyc2)) + + def test_recompilation(self): + # Test compile_dir() when pyc files already exists and the script + # content changed + with self.temporary_directory(): + script = self.make_script("a = 0") + self.compile_dir() + # All three levels have the same inode + self.check_hardlinks(script) + + pycs = get_pycs(script) + inode = os.stat(pycs[0]).st_ino + + # Change of the module content + script = self.make_script("print(0)") + + # Recompilation without -o 1 + self.compile_dir(optimize=[0, 2], force=True) + + # opt-1.pyc should have the same inode as before and others should not + self.assertEqual(inode, os.stat(pycs[1]).st_ino) + self.assertTrue(is_hardlink(pycs[0], pycs[2])) + self.assertNotEqual(inode, os.stat(pycs[2]).st_ino) + # opt-1.pyc and opt-2.pyc have different content + self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True)) + + def test_import(self): + # Test that import updates a single pyc file when pyc files already + # exists and the script content changed + with self.temporary_directory(): + script = self.make_script(self.create_code(), name="module") + self.compile_dir() + # All three levels have the same inode + 
self.check_hardlinks(script) + + pycs = get_pycs(script) + inode = os.stat(pycs[0]).st_ino + + # Change of the module content + script = self.make_script("print(0)", name="module") + + # Import the module in Python with -O (optimization level 1) + script_helper.assert_python_ok( + "-O", "-c", "import module", __isolated=False, PYTHONPATH=self.path + ) + + # Only opt-1.pyc is changed + self.assertEqual(inode, os.stat(pycs[0]).st_ino) + self.assertEqual(inode, os.stat(pycs[2]).st_ino) + self.assertFalse(is_hardlink(pycs[1], pycs[2])) + # opt-1.pyc and opt-2.pyc have different content + self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True)) + + +class HardlinkDedupTestsWithSourceEpoch(HardlinkDedupTestsBase, + unittest.TestCase, + metaclass=SourceDateEpochTestMeta, + source_date_epoch=True): + pass + + +class HardlinkDedupTestsNoSourceEpoch(HardlinkDedupTestsBase, + unittest.TestCase, + metaclass=SourceDateEpochTestMeta, + source_date_epoch=False): + pass + + if __name__ == "__main__": unittest.main() diff --git a/Misc/ACKS b/Misc/ACKS index f744de6b1f66d2..b479aa5d807f56 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -86,6 +86,7 @@ Marcin Bachry Alfonso Baciero Dwayne Bailey Stig Bakken +Lumír Balhar Aleksandr Balezin Greg Ball Lewis Ball diff --git a/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst b/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst new file mode 100644 index 00000000000000..d3049b05a78b6c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst @@ -0,0 +1,2 @@ +:mod:`compileall` is now able to use hardlinks to prevent duplicates in a +case when ``.pyc`` files for different optimization levels have the same content. From 97f33c35445e6d67df24dcbafef7b78333feb778 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 14 May 2020 18:05:58 +0200 Subject: [PATCH 096/115] bpo-40549: posixmodule.c uses defining_class (GH-20075) Pass PEP 573 defining_class to os.DirEntry methods. 
The module state is now retrieve from defining_class rather than Py_TYPE(self), to support subclasses (even if DirEntry doesn't support subclasses yet). * Pass the module rather than defining_class to DirEntry_fetch_stat(). * Only get the module state once in _posix_clear(), _posix_traverse() and _posixmodule_exec(). --- Modules/clinic/posixmodule.c.h | 91 +++++++---------- Modules/posixmodule.c | 178 ++++++++++++++++++--------------- 2 files changed, 132 insertions(+), 137 deletions(-) diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index cf6d7449bac832..41baa455739797 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -8388,18 +8388,24 @@ PyDoc_STRVAR(os_DirEntry_is_symlink__doc__, "Return True if the entry is a symbolic link; cached per entry."); #define OS_DIRENTRY_IS_SYMLINK_METHODDEF \ - {"is_symlink", (PyCFunction)os_DirEntry_is_symlink, METH_NOARGS, os_DirEntry_is_symlink__doc__}, + {"is_symlink", (PyCFunction)(void(*)(void))os_DirEntry_is_symlink, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_symlink__doc__}, static int -os_DirEntry_is_symlink_impl(DirEntry *self); +os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class); static PyObject * -os_DirEntry_is_symlink(DirEntry *self, PyObject *Py_UNUSED(ignored)) +os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":is_symlink", _keywords, 0}; int _return_value; - _return_value = os_DirEntry_is_symlink_impl(self); + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + _return_value = os_DirEntry_is_symlink_impl(self, defining_class); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -8416,34 +8422,25 @@ PyDoc_STRVAR(os_DirEntry_stat__doc__, "Return stat_result object for the 
entry; cached per entry."); #define OS_DIRENTRY_STAT_METHODDEF \ - {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__}, + {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__}, static PyObject * -os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks); +os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks); static PyObject * -os_DirEntry_stat(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_stat(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"follow_symlinks", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "stat", 0}; - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; + static _PyArg_Parser _parser = {"|$p:stat", _keywords, 0}; int follow_symlinks = 1; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_kwonly; - } - follow_symlinks = PyObject_IsTrue(args[0]); - if (follow_symlinks < 0) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &follow_symlinks)) { goto exit; } -skip_optional_kwonly: - return_value = os_DirEntry_stat_impl(self, follow_symlinks); + return_value = os_DirEntry_stat_impl(self, defining_class, follow_symlinks); exit: return return_value; @@ -8456,35 +8453,26 @@ PyDoc_STRVAR(os_DirEntry_is_dir__doc__, "Return True if the entry is a directory; cached per entry."); #define OS_DIRENTRY_IS_DIR_METHODDEF \ - {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_dir__doc__}, + {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, 
os_DirEntry_is_dir__doc__}, static int -os_DirEntry_is_dir_impl(DirEntry *self, int follow_symlinks); +os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks); static PyObject * -os_DirEntry_is_dir(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_dir(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"follow_symlinks", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "is_dir", 0}; - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; + static _PyArg_Parser _parser = {"|$p:is_dir", _keywords, 0}; int follow_symlinks = 1; int _return_value; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_kwonly; - } - follow_symlinks = PyObject_IsTrue(args[0]); - if (follow_symlinks < 0) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &follow_symlinks)) { goto exit; } -skip_optional_kwonly: - _return_value = os_DirEntry_is_dir_impl(self, follow_symlinks); + _return_value = os_DirEntry_is_dir_impl(self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -8501,35 +8489,26 @@ PyDoc_STRVAR(os_DirEntry_is_file__doc__, "Return True if the entry is a file; cached per entry."); #define OS_DIRENTRY_IS_FILE_METHODDEF \ - {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__}, + {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__}, static int -os_DirEntry_is_file_impl(DirEntry *self, int follow_symlinks); +os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks); static PyObject 
* -os_DirEntry_is_file(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_file(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"follow_symlinks", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "is_file", 0}; - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; + static _PyArg_Parser _parser = {"|$p:is_file", _keywords, 0}; int follow_symlinks = 1; int _return_value; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_kwonly; - } - follow_symlinks = PyObject_IsTrue(args[0]); - if (follow_symlinks < 0) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &follow_symlinks)) { goto exit; } -skip_optional_kwonly: - _return_value = os_DirEntry_is_file_impl(self, follow_symlinks); + _return_value = os_DirEntry_is_file_impl(self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -9417,4 +9396,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=be90d3aba972098b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=005919eaaef3f8e6 input=a9049054013a1b77]*/ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 60a60e9aed76b6..2ddf30de89a680 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -2101,48 +2101,50 @@ statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds) static int _posix_clear(PyObject *module) { - Py_CLEAR(get_posix_state(module)->billion); - Py_CLEAR(get_posix_state(module)->DirEntryType); - 
Py_CLEAR(get_posix_state(module)->ScandirIteratorType); + _posixstate *state = get_posix_state(module); + Py_CLEAR(state->billion); + Py_CLEAR(state->DirEntryType); + Py_CLEAR(state->ScandirIteratorType); #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) - Py_CLEAR(get_posix_state(module)->SchedParamType); + Py_CLEAR(state->SchedParamType); #endif - Py_CLEAR(get_posix_state(module)->StatResultType); - Py_CLEAR(get_posix_state(module)->StatVFSResultType); - Py_CLEAR(get_posix_state(module)->TerminalSizeType); - Py_CLEAR(get_posix_state(module)->TimesResultType); - Py_CLEAR(get_posix_state(module)->UnameResultType); + Py_CLEAR(state->StatResultType); + Py_CLEAR(state->StatVFSResultType); + Py_CLEAR(state->TerminalSizeType); + Py_CLEAR(state->TimesResultType); + Py_CLEAR(state->UnameResultType); #if defined(HAVE_WAITID) && !defined(__APPLE__) - Py_CLEAR(get_posix_state(module)->WaitidResultType); + Py_CLEAR(state->WaitidResultType); #endif #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) - Py_CLEAR(get_posix_state(module)->struct_rusage); + Py_CLEAR(state->struct_rusage); #endif - Py_CLEAR(get_posix_state(module)->st_mode); + Py_CLEAR(state->st_mode); return 0; } static int _posix_traverse(PyObject *module, visitproc visit, void *arg) { - Py_VISIT(get_posix_state(module)->billion); - Py_VISIT(get_posix_state(module)->DirEntryType); - Py_VISIT(get_posix_state(module)->ScandirIteratorType); + _posixstate *state = get_posix_state(module); + Py_VISIT(state->billion); + Py_VISIT(state->DirEntryType); + Py_VISIT(state->ScandirIteratorType); #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) - Py_VISIT(get_posix_state(module)->SchedParamType); + Py_VISIT(state->SchedParamType); #endif - Py_VISIT(get_posix_state(module)->StatResultType); - Py_VISIT(get_posix_state(module)->StatVFSResultType); - 
Py_VISIT(get_posix_state(module)->TerminalSizeType); - Py_VISIT(get_posix_state(module)->TimesResultType); - Py_VISIT(get_posix_state(module)->UnameResultType); + Py_VISIT(state->StatResultType); + Py_VISIT(state->StatVFSResultType); + Py_VISIT(state->TerminalSizeType); + Py_VISIT(state->TimesResultType); + Py_VISIT(state->UnameResultType); #if defined(HAVE_WAITID) && !defined(__APPLE__) - Py_VISIT(get_posix_state(module)->WaitidResultType); + Py_VISIT(state->WaitidResultType); #endif #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) - Py_VISIT(get_posix_state(module)->struct_rusage); + Py_VISIT(state->struct_rusage); #endif - Py_VISIT(get_posix_state(module)->st_mode); + Py_VISIT(state->st_mode); return 0; } @@ -12747,17 +12749,20 @@ DirEntry_dealloc(DirEntry *entry) /* Forward reference */ static int -DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits); +DirEntry_test_mode(PyTypeObject *defining_class, DirEntry *self, + int follow_symlinks, unsigned short mode_bits); /*[clinic input] os.DirEntry.is_symlink -> bool + defining_class: defining_class + / Return True if the entry is a symbolic link; cached per entry. 
[clinic start generated code]*/ static int -os_DirEntry_is_symlink_impl(DirEntry *self) -/*[clinic end generated code: output=42244667d7bcfc25 input=1605a1b4b96976c3]*/ +os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class) +/*[clinic end generated code: output=293096d589b6d47c input=e9acc5ee4d511113]*/ { #ifdef MS_WINDOWS return (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK; @@ -12766,21 +12771,15 @@ os_DirEntry_is_symlink_impl(DirEntry *self) if (self->d_type != DT_UNKNOWN) return self->d_type == DT_LNK; else - return DirEntry_test_mode(self, 0, S_IFLNK); + return DirEntry_test_mode(defining_class, self, 0, S_IFLNK); #else /* POSIX without d_type */ - return DirEntry_test_mode(self, 0, S_IFLNK); + return DirEntry_test_mode(defining_class, self, 0, S_IFLNK); #endif } -static inline PyObject* -DirEntry_get_module(DirEntry *self) -{ - return PyType_GetModule(Py_TYPE(self)); -} - static PyObject * -DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) +DirEntry_fetch_stat(PyObject *module, DirEntry *self, int follow_symlinks) { int result; STRUCT_STAT st; @@ -12816,18 +12815,18 @@ DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) if (result != 0) return path_object_error(self->path); - return _pystat_fromstructstat(DirEntry_get_module(self), &st); + return _pystat_fromstructstat(module, &st); } static PyObject * -DirEntry_get_lstat(DirEntry *self) +DirEntry_get_lstat(PyTypeObject *defining_class, DirEntry *self) { if (!self->lstat) { + PyObject *module = PyType_GetModule(defining_class); #ifdef MS_WINDOWS - self->lstat = _pystat_fromstructstat(DirEntry_get_module(self), - &self->win32_lstat); + self->lstat = _pystat_fromstructstat(module, &self->win32_lstat); #else /* POSIX */ - self->lstat = DirEntry_fetch_stat(self, 0); + self->lstat = DirEntry_fetch_stat(module, self, 0); #endif } Py_XINCREF(self->lstat); @@ -12836,6 +12835,8 @@ DirEntry_get_lstat(DirEntry *self) /*[clinic input] os.DirEntry.stat + defining_class: defining_class + / 
* follow_symlinks: bool = True @@ -12843,20 +12844,26 @@ Return stat_result object for the entry; cached per entry. [clinic start generated code]*/ static PyObject * -os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks) -/*[clinic end generated code: output=008593b3a6d01305 input=280d14c1d6f1d00d]*/ +os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks) +/*[clinic end generated code: output=23f803e19c3e780e input=e816273c4e67ee98]*/ { - if (!follow_symlinks) - return DirEntry_get_lstat(self); + if (!follow_symlinks) { + return DirEntry_get_lstat(defining_class, self); + } if (!self->stat) { - int result = os_DirEntry_is_symlink_impl(self); - if (result == -1) + int result = os_DirEntry_is_symlink_impl(self, defining_class); + if (result == -1) { return NULL; - else if (result) - self->stat = DirEntry_fetch_stat(self, 1); - else - self->stat = DirEntry_get_lstat(self); + } + if (result) { + PyObject *module = PyType_GetModule(defining_class); + self->stat = DirEntry_fetch_stat(module, self, 1); + } + else { + self->stat = DirEntry_get_lstat(defining_class, self); + } } Py_XINCREF(self->stat); @@ -12865,7 +12872,8 @@ os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks) /* Set exception and return -1 on error, 0 for False, 1 for True */ static int -DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits) +DirEntry_test_mode(PyTypeObject *defining_class, DirEntry *self, + int follow_symlinks, unsigned short mode_bits) { PyObject *stat = NULL; PyObject *st_mode = NULL; @@ -12890,7 +12898,7 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits #if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE) if (need_stat) { #endif - stat = os_DirEntry_stat_impl(self, follow_symlinks); + stat = os_DirEntry_stat_impl(self, defining_class, follow_symlinks); if (!stat) { if (PyErr_ExceptionMatches(PyExc_FileNotFoundError)) { /* If file doesn't exist (anymore), then return False @@ 
-12900,7 +12908,8 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits } goto error; } - st_mode = PyObject_GetAttr(stat, get_posix_state(DirEntry_get_module(self))->st_mode); + _posixstate* state = get_posix_state(PyType_GetModule(defining_class)); + st_mode = PyObject_GetAttr(stat, state->st_mode); if (!st_mode) goto error; @@ -12943,6 +12952,8 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits /*[clinic input] os.DirEntry.is_dir -> bool + defining_class: defining_class + / * follow_symlinks: bool = True @@ -12950,14 +12961,17 @@ Return True if the entry is a directory; cached per entry. [clinic start generated code]*/ static int -os_DirEntry_is_dir_impl(DirEntry *self, int follow_symlinks) -/*[clinic end generated code: output=ad2e8d54365da287 input=0135232766f53f58]*/ +os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks) +/*[clinic end generated code: output=0cd453b9c0987fdf input=1a4ffd6dec9920cb]*/ { - return DirEntry_test_mode(self, follow_symlinks, S_IFDIR); + return DirEntry_test_mode(defining_class, self, follow_symlinks, S_IFDIR); } /*[clinic input] os.DirEntry.is_file -> bool + defining_class: defining_class + / * follow_symlinks: bool = True @@ -12965,10 +12979,11 @@ Return True if the entry is a file; cached per entry. 
[clinic start generated code]*/ static int -os_DirEntry_is_file_impl(DirEntry *self, int follow_symlinks) -/*[clinic end generated code: output=8462ade481d8a476 input=0dc90be168b041ee]*/ +os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks) +/*[clinic end generated code: output=f7c277ab5ba80908 input=0a64c5a12e802e3b]*/ { - return DirEntry_test_mode(self, follow_symlinks, S_IFREG); + return DirEntry_test_mode(defining_class, self, follow_symlinks, S_IFREG); } /*[clinic input] @@ -13496,6 +13511,8 @@ static PyType_Spec ScandirIteratorType_spec = { MODNAME ".ScandirIterator", sizeof(ScandirIterator), 0, + // bpo-40549: Py_TPFLAGS_BASETYPE should not be used, since + // PyType_GetModule(Py_TYPE(self)) doesn't work on a subclass instance. Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_FINALIZE, ScandirIteratorType_slots }; @@ -14785,12 +14802,10 @@ static const char * const have_functions[] = { static int posixmodule_exec(PyObject *m) { - PyObject *v; - PyObject *list; - const char * const *trace; + _posixstate *state = get_posix_state(m); /* Initialize environ dictionary */ - v = convertenviron(); + PyObject *v = convertenviron(); Py_XINCREF(v); if (v == NULL || PyModule_AddObject(m, "environ", v) != 0) return -1; @@ -14813,7 +14828,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(WaitidResultType); PyModule_AddObject(m, "waitid_result", WaitidResultType); - get_posix_state(m)->WaitidResultType = WaitidResultType; + state->WaitidResultType = WaitidResultType; #endif stat_result_desc.name = "os.stat_result"; /* see issue #19209 */ @@ -14826,7 +14841,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(StatResultType); PyModule_AddObject(m, "stat_result", StatResultType); - get_posix_state(m)->StatResultType = StatResultType; + state->StatResultType = StatResultType; structseq_new = ((PyTypeObject *)StatResultType)->tp_new; ((PyTypeObject *)StatResultType)->tp_new = statresult_new; @@ -14837,7 +14852,7 @@ posixmodule_exec(PyObject *m) } 
Py_INCREF(StatVFSResultType); PyModule_AddObject(m, "statvfs_result", StatVFSResultType); - get_posix_state(m)->StatVFSResultType = StatVFSResultType; + state->StatVFSResultType = StatVFSResultType; #ifdef NEED_TICKS_PER_SECOND # if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK) ticks_per_second = sysconf(_SC_CLK_TCK); @@ -14856,7 +14871,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(SchedParamType); PyModule_AddObject(m, "sched_param", SchedParamType); - get_posix_state(m)->SchedParamType = SchedParamType; + state->SchedParamType = SchedParamType; ((PyTypeObject *)SchedParamType)->tp_new = os_sched_param; #endif @@ -14867,14 +14882,14 @@ posixmodule_exec(PyObject *m) } Py_INCREF(TerminalSizeType); PyModule_AddObject(m, "terminal_size", TerminalSizeType); - get_posix_state(m)->TerminalSizeType = TerminalSizeType; + state->TerminalSizeType = TerminalSizeType; /* initialize scandir types */ PyObject *ScandirIteratorType = PyType_FromModuleAndSpec(m, &ScandirIteratorType_spec, NULL); if (ScandirIteratorType == NULL) { return -1; } - get_posix_state(m)->ScandirIteratorType = ScandirIteratorType; + state->ScandirIteratorType = ScandirIteratorType; PyObject *DirEntryType = PyType_FromModuleAndSpec(m, &DirEntryType_spec, NULL); if (DirEntryType == NULL) { @@ -14882,7 +14897,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(DirEntryType); PyModule_AddObject(m, "DirEntry", DirEntryType); - get_posix_state(m)->DirEntryType = DirEntryType; + state->DirEntryType = DirEntryType; times_result_desc.name = MODNAME ".times_result"; PyObject *TimesResultType = (PyObject *)PyStructSequence_NewType(×_result_desc); @@ -14891,7 +14906,7 @@ posixmodule_exec(PyObject *m) } Py_INCREF(TimesResultType); PyModule_AddObject(m, "times_result", TimesResultType); - get_posix_state(m)->TimesResultType = TimesResultType; + state->TimesResultType = TimesResultType; PyTypeObject *UnameResultType = PyStructSequence_NewType(&uname_result_desc); if (UnameResultType == NULL) { @@ -14899,7 +14914,7 @@ 
posixmodule_exec(PyObject *m) } Py_INCREF(UnameResultType); PyModule_AddObject(m, "uname_result", (PyObject *)UnameResultType); - get_posix_state(m)->UnameResultType = (PyObject *)UnameResultType; + state->UnameResultType = (PyObject *)UnameResultType; #ifdef __APPLE__ /* @@ -14939,15 +14954,15 @@ posixmodule_exec(PyObject *m) #endif /* __APPLE__ */ - if ((get_posix_state(m)->billion = PyLong_FromLong(1000000000)) == NULL) + if ((state->billion = PyLong_FromLong(1000000000)) == NULL) return -1; #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) - get_posix_state(m)->struct_rusage = PyUnicode_InternFromString("struct_rusage"); - if (get_posix_state(m)->struct_rusage == NULL) + state->struct_rusage = PyUnicode_InternFromString("struct_rusage"); + if (state->struct_rusage == NULL) return -1; #endif - get_posix_state(m)->st_mode = PyUnicode_InternFromString("st_mode"); - if (get_posix_state(m)->st_mode == NULL) + state->st_mode = PyUnicode_InternFromString("st_mode"); + if (state->st_mode == NULL) return -1; /* suppress "function not used" warnings */ @@ -14964,10 +14979,11 @@ posixmodule_exec(PyObject *m) * provide list of locally available functions * so os.py can populate support_* lists */ - list = PyList_New(0); - if (!list) + PyObject *list = PyList_New(0); + if (!list) { return -1; - for (trace = have_functions; *trace; trace++) { + } + for (const char * const *trace = have_functions; *trace; trace++) { PyObject *unicode = PyUnicode_DecodeASCII(*trace, strlen(*trace), NULL); if (!unicode) return -1; From f2c3b6823bc4777d4a14eb0c3615b719521f763a Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 14 May 2020 18:46:24 +0200 Subject: [PATCH 097/115] Revert "bpo-32604: [_xxsubinterpreters] Propagate exceptions. (GH-19768)" (GH-20089) * Revert "bpo-40613: Remove compiler warning from _xxsubinterpretersmodule (GH-20069)" This reverts commit fa0a66e62d087765dbc5c1b89d6149a23ecfb0a6. * Revert "bpo-32604: [_xxsubinterpreters] Propagate exceptions. 
(GH-19768)" This reverts commit a1d9e0accd33af1d8e90fc48b34c13d7b07dcf57. --- Lib/test/test__xxsubinterpreters.py | 301 +------ Modules/_xxsubinterpretersmodule.c | 1147 +++------------------------ 2 files changed, 123 insertions(+), 1325 deletions(-) diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index 039c040ad39508..e17bfde2c2f75a 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -1,4 +1,3 @@ -import builtins from collections import namedtuple import contextlib import itertools @@ -867,11 +866,10 @@ def assert_run_failed(self, exctype, msg=None): yield if msg is None: self.assertEqual(str(caught.exception).split(':')[0], - exctype.__name__) + str(exctype)) else: self.assertEqual(str(caught.exception), - "{}: {}".format(exctype.__name__, msg)) - self.assertIsInstance(caught.exception.__cause__, exctype) + "{}: {}".format(exctype, msg)) def test_invalid_syntax(self): with self.assert_run_failed(SyntaxError): @@ -1062,301 +1060,6 @@ def f(): self.assertEqual(retcode, 0) -def build_exception(exctype, /, *args, **kwargs): - # XXX Use __qualname__? 
- name = exctype.__name__ - argreprs = [repr(a) for a in args] - if kwargs: - kwargreprs = [f'{k}={v!r}' for k, v in kwargs.items()] - script = f'{name}({", ".join(argreprs)}, {", ".join(kwargreprs)})' - else: - script = f'{name}({", ".join(argreprs)})' - expected = exctype(*args, **kwargs) - return script, expected - - -def build_exceptions(self, *exctypes, default=None, custom=None, bases=True): - if not exctypes: - raise NotImplementedError - if not default: - default = ((), {}) - elif isinstance(default, str): - default = ((default,), {}) - elif type(default) is not tuple: - raise NotImplementedError - elif len(default) != 2: - default = (default, {}) - elif type(default[0]) is not tuple: - default = (default, {}) - elif type(default[1]) is not dict: - default = (default, {}) - # else leave it alone - - for exctype in exctypes: - customtype = None - values = default - if custom: - if exctype in custom: - customtype = exctype - elif bases: - for customtype in custom: - if issubclass(exctype, customtype): - break - else: - customtype = None - if customtype is not None: - values = custom[customtype] - if values is None: - continue - args, kwargs = values - script, expected = build_exception(exctype, *args, **kwargs) - yield exctype, customtype, script, expected - - -try: - raise Exception -except Exception as exc: - assert exc.__traceback__ is not None - Traceback = type(exc.__traceback__) - - -class RunFailedTests(TestBase): - - BUILTINS = [v - for v in vars(builtins).values() - if (type(v) is type - and issubclass(v, Exception) - #and issubclass(v, BaseException) - ) - ] - BUILTINS_SPECIAL = [ - # These all have extra attributes (i.e. args/kwargs) - SyntaxError, - ImportError, - UnicodeError, - OSError, - SystemExit, - StopIteration, - ] - - @classmethod - def build_exceptions(cls, exctypes=None, default=(), custom=None): - if exctypes is None: - exctypes = cls.BUILTINS - if custom is None: - # Skip the "special" ones. 
- custom = {et: None for et in cls.BUILTINS_SPECIAL} - yield from build_exceptions(*exctypes, default=default, custom=custom) - - def assertExceptionsEqual(self, exc, expected, *, chained=True): - if type(expected) is type: - self.assertIs(type(exc), expected) - return - elif not isinstance(exc, Exception): - self.assertEqual(exc, expected) - elif not isinstance(expected, Exception): - self.assertEqual(exc, expected) - else: - # Plain equality doesn't work, so we have to compare manually. - self.assertIs(type(exc), type(expected)) - self.assertEqual(exc.args, expected.args) - self.assertEqual(exc.__reduce__(), expected.__reduce__()) - if chained: - self.assertExceptionsEqual(exc.__context__, - expected.__context__) - self.assertExceptionsEqual(exc.__cause__, - expected.__cause__) - self.assertEqual(exc.__suppress_context__, - expected.__suppress_context__) - - def assertTracebacksEqual(self, tb, expected): - if not isinstance(tb, Traceback): - self.assertEqual(tb, expected) - elif not isinstance(expected, Traceback): - self.assertEqual(tb, expected) - else: - self.assertEqual(tb.tb_frame.f_code.co_name, - expected.tb_frame.f_code.co_name) - self.assertEqual(tb.tb_frame.f_code.co_filename, - expected.tb_frame.f_code.co_filename) - self.assertEqual(tb.tb_lineno, expected.tb_lineno) - self.assertTracebacksEqual(tb.tb_next, expected.tb_next) - - # XXX Move this to TestBase? 
- @contextlib.contextmanager - def expected_run_failure(self, expected): - exctype = expected if type(expected) is type else type(expected) - - with self.assertRaises(interpreters.RunFailedError) as caught: - yield caught - exc = caught.exception - - modname = exctype.__module__ - if modname == 'builtins' or modname == '__main__': - exctypename = exctype.__name__ - else: - exctypename = f'{modname}.{exctype.__name__}' - if exctype is expected: - self.assertEqual(str(exc).split(':')[0], exctypename) - else: - self.assertEqual(str(exc), f'{exctypename}: {expected}') - self.assertExceptionsEqual(exc.__cause__, expected) - if exc.__cause__ is not None: - self.assertIsNotNone(exc.__cause__.__traceback__) - - def test_builtin_exceptions(self): - interpid = interpreters.create() - msg = '' - for i, info in enumerate(self.build_exceptions( - default=msg, - custom={ - SyntaxError: ((msg, '', 1, 3, 'a +?'), {}), - ImportError: ((msg,), {'name': 'spam', 'path': '/x/spam.py'}), - UnicodeError: None, - #UnicodeError: ((), {}), - #OSError: ((), {}), - SystemExit: ((1,), {}), - StopIteration: (('',), {}), - }, - )): - exctype, _, script, expected = info - testname = f'{i+1} - {script}' - script = f'raise {script}' - - with self.subTest(testname): - with self.expected_run_failure(expected): - interpreters.run_string(interpid, script) - - def test_custom_exception_from___main__(self): - script = dedent(""" - class SpamError(Exception): - def __init__(self, q): - super().__init__(f'got {q}') - self.q = q - raise SpamError('eggs') - """) - expected = Exception(f'SpamError: got {"eggs"}') - - interpid = interpreters.create() - with self.assertRaises(interpreters.RunFailedError) as caught: - interpreters.run_string(interpid, script) - cause = caught.exception.__cause__ - - self.assertExceptionsEqual(cause, expected) - - class SpamError(Exception): - # The normal Exception.__reduce__() produces a funny result - # here. So we have to use a custom __new__(). 
- def __new__(cls, q): - if type(q) is SpamError: - return q - return super().__new__(cls, q) - def __init__(self, q): - super().__init__(f'got {q}') - self.q = q - - def test_custom_exception(self): - script = dedent(""" - import test.test__xxsubinterpreters - SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamError - raise SpamError('eggs') - """) - try: - ns = {} - exec(script, ns, ns) - except Exception as exc: - expected = exc - - interpid = interpreters.create() - with self.expected_run_failure(expected): - interpreters.run_string(interpid, script) - - class SpamReducedError(Exception): - def __init__(self, q): - super().__init__(f'got {q}') - self.q = q - def __reduce__(self): - return (type(self), (self.q,), {}) - - def test_custom___reduce__(self): - script = dedent(""" - import test.test__xxsubinterpreters - SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamReducedError - raise SpamError('eggs') - """) - try: - exec(script, (ns := {'__name__': '__main__'}), ns) - except Exception as exc: - expected = exc - - interpid = interpreters.create() - with self.expected_run_failure(expected): - interpreters.run_string(interpid, script) - - def test_traceback_propagated(self): - script = dedent(""" - def do_spam(): - raise Exception('uh-oh') - def do_eggs(): - return do_spam() - class Spam: - def do(self): - return do_eggs() - def get_handler(): - def handler(): - return Spam().do() - return handler - go = (lambda: get_handler()()) - def iter_all(): - yield from (go() for _ in [True]) - yield None - def main(): - for v in iter_all(): - pass - main() - """) - try: - ns = {} - exec(script, ns, ns) - except Exception as exc: - expected = exc - expectedtb = exc.__traceback__.tb_next - - interpid = interpreters.create() - with self.expected_run_failure(expected) as caught: - interpreters.run_string(interpid, script) - exc = caught.exception - - self.assertTracebacksEqual(exc.__cause__.__traceback__, - expectedtb) - - def test_chained_exceptions(self): 
- script = dedent(""" - try: - raise ValueError('msg 1') - except Exception as exc1: - try: - raise TypeError('msg 2') - except Exception as exc2: - try: - raise IndexError('msg 3') from exc2 - except Exception: - raise AttributeError('msg 4') - """) - try: - exec(script, {}, {}) - except Exception as exc: - expected = exc - - interpid = interpreters.create() - with self.expected_run_failure(expected) as caught: - interpreters.run_string(interpid, script) - exc = caught.exception - - # ...just to be sure. - self.assertIs(type(exc.__cause__), AttributeError) - - ################################## # channel tests diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 18dd8918e7c89a..8a6fce9e0b4bd9 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1,4 +1,5 @@ -/* _interpreters module */ + +/* interpreters module */ /* low-level access to interpreter primitives */ #include "Python.h" @@ -6,927 +7,35 @@ #include "interpreteridobject.h" -// XXX Emit a warning? -#define IGNORE_FAILURE(msg) \ - fprintf(stderr, " -----\nRunFailedError: %s\n", msg); \ - PyErr_PrintEx(0); \ - fprintf(stderr, " -----\n"); \ - PyErr_Clear(); - -typedef void (*_deallocfunc)(void *); - -static PyInterpreterState * -_get_current(void) -{ - // _PyInterpreterState_Get() aborts if lookup fails, so don't need - // to check the result for NULL. - return _PyInterpreterState_Get(); -} - - -/* string utils *************************************************************/ - -// PyMem_Free() must be used to dealocate the resulting string. 
static char * -_strdup_and_size(const char *data, Py_ssize_t *psize, _deallocfunc *dealloc) -{ - if (data == NULL) { - if (psize != NULL) { - *psize = 0; - } - if (dealloc != NULL) { - *dealloc = NULL; - } - return ""; - } - - Py_ssize_t size; - if (psize == NULL) { - size = strlen(data); - } else { - size = *psize; - if (size == 0) { - size = strlen(data); - *psize = size; // The size "return" value. - } - } - char *copied = PyMem_Malloc(size+1); - if (copied == NULL) { - PyErr_NoMemory(); - return NULL; - } - if (dealloc != NULL) { - *dealloc = PyMem_Free; - } - memcpy(copied, data, size+1); - return copied; -} - -static const char * -_pyobj_get_str_and_size(PyObject *obj, Py_ssize_t *psize) -{ - if (PyUnicode_Check(obj)) { - return PyUnicode_AsUTF8AndSize(obj, psize); - } else { - const char *data = NULL; - PyBytes_AsStringAndSize(obj, (char **)&data, psize); - return data; - } -} - -/* "raw" strings */ - -typedef struct _rawstring { - Py_ssize_t size; - const char *data; - _deallocfunc dealloc; -} _rawstring; - -static void -_rawstring_init(_rawstring *raw) -{ - raw->size = 0; - raw->data = NULL; - raw->dealloc = NULL; -} - -static _rawstring * -_rawstring_new(void) -{ - _rawstring *raw = PyMem_NEW(_rawstring, 1); - if (raw == NULL) { - PyErr_NoMemory(); - return NULL; - } - _rawstring_init(raw); - return raw; -} - -static void -_rawstring_clear(_rawstring *raw) -{ - if (raw->data != NULL && raw->dealloc != NULL) { - (*raw->dealloc)((void *)raw->data); - } - _rawstring_init(raw); -} - -static void -_rawstring_free(_rawstring *raw) -{ - _rawstring_clear(raw); - PyMem_Free(raw); -} - -static int -_rawstring_is_clear(_rawstring *raw) -{ - return raw->size == 0 && raw->data == NULL && raw->dealloc == NULL; -} - -//static void -//_rawstring_move(_rawstring *raw, _rawstring *src) -//{ -// raw->size = src->size; -// raw->data = src->data; -// raw->dealloc = src->dealloc; -// _rawstring_init(src); -//} - -static void -_rawstring_proxy(_rawstring *raw, const char *str) 
+_copy_raw_string(PyObject *strobj) { + const char *str = PyUnicode_AsUTF8(strobj); if (str == NULL) { - str = ""; - } - raw->size = strlen(str); - raw->data = str; - raw->dealloc = NULL; -} - -static int -_rawstring_buffer(_rawstring *raw, Py_ssize_t size) -{ - raw->data = PyMem_Malloc(size+1); - if (raw->data == NULL) { - PyErr_NoMemory(); - return -1; - } - raw->size = size; - raw->dealloc = PyMem_Free; - return 0; -} - -static int -_rawstring_strcpy(_rawstring *raw, const char *str, Py_ssize_t size) -{ - _deallocfunc dealloc = NULL; - const char *copied = _strdup_and_size(str, &size, &dealloc); - if (copied == NULL) { - return -1; - } - - raw->size = size; - raw->dealloc = dealloc; - raw->data = copied; - return 0; -} - -static int -_rawstring_from_pyobj(_rawstring *raw, PyObject *obj) -{ - Py_ssize_t size = 0; - const char *data = _pyobj_get_str_and_size(obj, &size); - if (PyErr_Occurred()) { - return -1; - } - if (_rawstring_strcpy(raw, data, size) != 0) { - return -1; - } - return 0; -} - -static int -_rawstring_from_pyobj_attr(_rawstring *raw, PyObject *obj, const char *attr) -{ - int res = -1; - PyObject *valueobj = PyObject_GetAttrString(obj, attr); - if (valueobj == NULL) { - goto done; - } - if (!PyUnicode_Check(valueobj)) { - // XXX PyObject_Str()? Repr()? - goto done; - } - const char *valuestr = PyUnicode_AsUTF8(valueobj); - if (valuestr == NULL) { - if (PyErr_Occurred()) { - goto done; - } - } else if (_rawstring_strcpy(raw, valuestr, 0) != 0) { - _rawstring_clear(raw); - goto done; - } - res = 0; - -done: - Py_XDECREF(valueobj); - return res; -} - -static PyObject * -_rawstring_as_pybytes(_rawstring *raw) -{ - return PyBytes_FromStringAndSize(raw->data, raw->size); -} - - -/* object utils *************************************************************/ - -static void -_pyobj_identify_type(PyObject *obj, _rawstring *modname, _rawstring *clsname) -{ - PyObject *objtype = (PyObject *)Py_TYPE(obj); - - // Try __module__ and __name__. 
- if (_rawstring_from_pyobj_attr(modname, objtype, "__module__") != 0) { - // Fall back to the previous values in "modname". - IGNORE_FAILURE("bad __module__"); - } - if (_rawstring_from_pyobj_attr(clsname, objtype, "__name__") != 0) { - // Fall back to the previous values in "clsname". - IGNORE_FAILURE("bad __name__"); - } - - // XXX Fall back to __qualname__? - // XXX Fall back to tp_name? -} - -static PyObject * -_pyobj_get_class(const char *modname, const char *clsname) -{ - assert(clsname != NULL); - if (modname == NULL) { - modname = "builtins"; - } - - PyObject *module = PyImport_ImportModule(modname); - if (module == NULL) { - return NULL; - } - PyObject *cls = PyObject_GetAttrString(module, clsname); - Py_DECREF(module); - return cls; -} - -static PyObject * -_pyobj_create(const char *modname, const char *clsname, PyObject *arg) -{ - PyObject *cls = _pyobj_get_class(modname, clsname); - if (cls == NULL) { return NULL; } - PyObject *obj = NULL; - if (arg == NULL) { - obj = _PyObject_CallNoArg(cls); - } else { - obj = PyObject_CallFunction(cls, "O", arg); - } - Py_DECREF(cls); - return obj; -} - - -/* object snapshots */ - -typedef struct _objsnapshot { - // If modname is NULL then try "builtins" and "__main__". - _rawstring modname; - // clsname is required. - _rawstring clsname; - - // The rest are optional. - - // The serialized exception. 
- _rawstring *serialized; -} _objsnapshot; - -static void -_objsnapshot_init(_objsnapshot *osn) -{ - _rawstring_init(&osn->modname); - _rawstring_init(&osn->clsname); - osn->serialized = NULL; -} - -//static _objsnapshot * -//_objsnapshot_new(void) -//{ -// _objsnapshot *osn = PyMem_NEW(_objsnapshot, 1); -// if (osn == NULL) { -// PyErr_NoMemory(); -// return NULL; -// } -// _objsnapshot_init(osn); -// return osn; -//} - -static void -_objsnapshot_clear(_objsnapshot *osn) -{ - _rawstring_clear(&osn->modname); - _rawstring_clear(&osn->clsname); - if (osn->serialized != NULL) { - _rawstring_free(osn->serialized); - osn->serialized = NULL; - } -} - -//static void -//_objsnapshot_free(_objsnapshot *osn) -//{ -// _objsnapshot_clear(osn); -// PyMem_Free(osn); -//} - -#ifndef NDEBUG -static int -_objsnapshot_is_clear(_objsnapshot *osn) -{ - return osn->serialized == NULL - && _rawstring_is_clear(&osn->modname) - && _rawstring_is_clear(&osn->clsname); -} -#endif - -static void -_objsnapshot_summarize(_objsnapshot *osn, _rawstring *rawbuf, const char *msg) -{ - if (msg == NULL || *msg == '\0') { - // XXX Keep it NULL? - // XXX Keep it an empty string? - // XXX Use something more informative? - msg = ""; - } - const char *clsname = osn->clsname.data; - const char *modname = osn->modname.data; - if (modname && *modname == '\0') { - modname = NULL; - } - - // Prep the buffer. - Py_ssize_t size = strlen(clsname); - if (modname != NULL) { - if (strcmp(modname, "builtins") == 0) { - modname = NULL; - } else if (strcmp(modname, "__main__") == 0) { - modname = NULL; - } else { - size += strlen(modname) + 1; - } - } - if (msg != NULL) { - size += strlen(": ") + strlen(msg); - } - if (modname != NULL || msg != NULL) { - if (_rawstring_buffer(rawbuf, size) != 0) { - IGNORE_FAILURE("could not summarize object snapshot"); - return; - } - } - // ...else we'll proxy clsname as-is, so no need to allocate a buffer. - - // XXX Use __qualname__ somehow? 
- char *buf = (char *)rawbuf->data; - if (modname != NULL) { - if (msg != NULL) { - snprintf(buf, size+1, "%s.%s: %s", modname, clsname, msg); - } else { - snprintf(buf, size+1, "%s.%s", modname, clsname); - } - } else if (msg != NULL) { - snprintf(buf, size+1, "%s: %s", clsname, msg); - } else { - _rawstring_proxy(rawbuf, clsname); - } -} - -static _rawstring * -_objsnapshot_get_minimal_summary(_objsnapshot *osn, PyObject *obj) -{ - const char *str = NULL; - PyObject *objstr = PyObject_Str(obj); - if (objstr == NULL) { - PyErr_Clear(); - } else { - str = PyUnicode_AsUTF8(objstr); - if (str == NULL) { - PyErr_Clear(); - } - } - - _rawstring *summary = _rawstring_new(); - if (summary == NULL) { - return NULL; - } - _objsnapshot_summarize(osn, summary, str); - return summary; -} - -static void -_objsnapshot_extract(_objsnapshot *osn, PyObject *obj) -{ - assert(_objsnapshot_is_clear(osn)); - - // Get the "qualname". - _rawstring_proxy(&osn->modname, ""); - _rawstring_proxy(&osn->clsname, ""); - _pyobj_identify_type(obj, &osn->modname, &osn->clsname); - - // Serialize the object. - // XXX Use marshal? - PyObject *pickle = PyImport_ImportModule("pickle"); - if (pickle == NULL) { - IGNORE_FAILURE("could not serialize object: pickle import failed"); - return; - } - PyObject *objdata = PyObject_CallMethod(pickle, "dumps", "(O)", obj); - Py_DECREF(pickle); - if (objdata == NULL) { - IGNORE_FAILURE("could not serialize object: pickle.dumps failed"); - } else { - _rawstring *serialized = _rawstring_new(); - int res = _rawstring_from_pyobj(serialized, objdata); - Py_DECREF(objdata); - if (res != 0) { - IGNORE_FAILURE("could not serialize object: raw str failed"); - _rawstring_free(serialized); - } else if (serialized->size == 0) { - _rawstring_free(serialized); - } else { - osn->serialized = serialized; - } - } -} - -static PyObject * -_objsnapshot_resolve_serialized(_objsnapshot *osn) -{ - assert(osn->serialized != NULL); - - // XXX Use marshal? 
- PyObject *pickle = PyImport_ImportModule("pickle"); - if (pickle == NULL) { - return NULL; - } - PyObject *objdata = _rawstring_as_pybytes(osn->serialized); - if (objdata == NULL) { - return NULL; - } else { - PyObject *obj = PyObject_CallMethod(pickle, "loads", "O", objdata); - Py_DECREF(objdata); - return obj; - } -} - -static PyObject * -_objsnapshot_resolve_naive(_objsnapshot *osn, PyObject *arg) -{ - if (_rawstring_is_clear(&osn->clsname)) { - // We can't proceed without at least the class name. - PyErr_SetString(PyExc_ValueError, "missing class name"); - return NULL; - } - - if (osn->modname.data != NULL) { - return _pyobj_create(osn->modname.data, osn->clsname.data, arg); - } else { - PyObject *obj = _pyobj_create("builtins", osn->clsname.data, arg); - if (obj == NULL) { - PyErr_Clear(); - obj = _pyobj_create("__main__", osn->clsname.data, arg); - } - return obj; - } -} - -static PyObject * -_objsnapshot_resolve(_objsnapshot *osn) -{ - if (osn->serialized != NULL) { - PyObject *obj = _objsnapshot_resolve_serialized(osn); - if (obj != NULL) { - return obj; - } - IGNORE_FAILURE("could not de-serialize object"); - } - - // Fall back to naive resolution. - return _objsnapshot_resolve_naive(osn, NULL); -} - - -/* exception utils **********************************************************/ - -// _pyexc_create is inspired by _PyErr_SetObject(). - -static PyObject * -_pyexc_create(PyObject *exctype, const char *msg, PyObject *tb) -{ - assert(exctype != NULL && PyExceptionClass_Check(exctype)); - - PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL; - PyErr_Fetch(&curtype, &curexc, &curtb); - - // Create the object. 
- PyObject *exc = NULL; - if (msg != NULL) { - PyObject *msgobj = PyUnicode_FromString(msg); - if (msgobj == NULL) { - IGNORE_FAILURE("could not deserialize propagated error message"); - } - exc = _PyObject_CallOneArg(exctype, msgobj); - Py_XDECREF(msgobj); - } else { - exc = _PyObject_CallNoArg(exctype); - } - if (exc == NULL) { - return NULL; - } - - // Set the traceback, if any. - if (tb == NULL) { - tb = curtb; - } - if (tb != NULL) { - // This does *not* steal a reference! - PyException_SetTraceback(exc, tb); - } - - PyErr_Restore(curtype, curexc, curtb); - - return exc; -} - -/* traceback snapshots */ - -typedef struct _tbsnapshot { - _rawstring tbs_funcname; - _rawstring tbs_filename; - int tbs_lineno; - struct _tbsnapshot *tbs_next; -} _tbsnapshot; - -static void -_tbsnapshot_init(_tbsnapshot *tbs) -{ - _rawstring_init(&tbs->tbs_funcname); - _rawstring_init(&tbs->tbs_filename); - tbs->tbs_lineno = -1; - tbs->tbs_next = NULL; -} - -static _tbsnapshot * -_tbsnapshot_new(void) -{ - _tbsnapshot *tbs = PyMem_NEW(_tbsnapshot, 1); - if (tbs == NULL) { - PyErr_NoMemory(); - return NULL; - } - _tbsnapshot_init(tbs); - return tbs; -} - -static void _tbsnapshot_free(_tbsnapshot *); // forward - -static void -_tbsnapshot_clear(_tbsnapshot *tbs) -{ - _rawstring_clear(&tbs->tbs_funcname); - _rawstring_clear(&tbs->tbs_filename); - tbs->tbs_lineno = -1; - if (tbs->tbs_next != NULL) { - _tbsnapshot_free(tbs->tbs_next); - tbs->tbs_next = NULL; - } -} - -static void -_tbsnapshot_free(_tbsnapshot *tbs) -{ - _tbsnapshot_clear(tbs); - PyMem_Free(tbs); -} - -#ifndef NDEBUG -static int -_tbsnapshot_is_clear(_tbsnapshot *tbs) -{ - return tbs->tbs_lineno == -1 && tbs->tbs_next == NULL - && _rawstring_is_clear(&tbs->tbs_funcname) - && _rawstring_is_clear(&tbs->tbs_filename); -} -#endif - -static int -_tbsnapshot_from_pytb(_tbsnapshot *tbs, PyTracebackObject *pytb) -{ - assert(_tbsnapshot_is_clear(tbs)); - assert(pytb != NULL); - - PyCodeObject *pycode = pytb->tb_frame->f_code; - 
const char *funcname = PyUnicode_AsUTF8(pycode->co_name); - if (_rawstring_strcpy(&tbs->tbs_funcname, funcname, 0) != 0) { - goto error; - } - const char *filename = PyUnicode_AsUTF8(pycode->co_filename); - if (_rawstring_strcpy(&tbs->tbs_filename, filename, 0) != 0) { - goto error; - } - tbs->tbs_lineno = pytb->tb_lineno; - - return 0; - -error: - _tbsnapshot_clear(tbs); - return -1; -} - -static int -_tbsnapshot_extract(_tbsnapshot *tbs, PyTracebackObject *pytb) -{ - assert(_tbsnapshot_is_clear(tbs)); - assert(pytb != NULL); - - _tbsnapshot *next = NULL; - while (pytb->tb_next != NULL) { - _tbsnapshot *_next = _tbsnapshot_new(); - if (_next == NULL) { - goto error; - } - if (_tbsnapshot_from_pytb(_next, pytb) != 0) { - goto error; - } - if (next != NULL) { - _next->tbs_next = next; - } - next = _next; - pytb = pytb->tb_next; - } - if (_tbsnapshot_from_pytb(tbs, pytb) != 0) { - goto error; - } - tbs->tbs_next = next; - - return 0; - -error: - _tbsnapshot_clear(tbs); - return -1; -} - -static PyObject * -_tbsnapshot_resolve(_tbsnapshot *tbs) -{ - assert(!PyErr_Occurred()); - // At this point there should be no traceback set yet. - - while (tbs != NULL) { - const char *funcname = tbs->tbs_funcname.data; - const char *filename = tbs->tbs_filename.data; - _PyTraceback_Add(funcname ? funcname : "", - filename ? filename : "", - tbs->tbs_lineno); - tbs = tbs->tbs_next; - } - - PyObject *exctype = NULL, *excval = NULL, *tb = NULL; - PyErr_Fetch(&exctype, &excval, &tb); - // Leave it cleared. 
- return tb; -} - -/* exception snapshots */ - -typedef struct _excsnapshot { - _objsnapshot es_object; - _rawstring *es_msg; - struct _excsnapshot *es_cause; - struct _excsnapshot *es_context; - char es_suppress_context; - struct _tbsnapshot *es_traceback; -} _excsnapshot; - -static void -_excsnapshot_init(_excsnapshot *es) -{ - _objsnapshot_init(&es->es_object); - es->es_msg = NULL; - es->es_cause = NULL; - es->es_context = NULL; - es->es_suppress_context = 0; - es->es_traceback = NULL; -} - -static _excsnapshot * -_excsnapshot_new(void) { - _excsnapshot *es = PyMem_NEW(_excsnapshot, 1); - if (es == NULL) { + char *copied = PyMem_Malloc(strlen(str)+1); + if (copied == NULL) { PyErr_NoMemory(); return NULL; } - _excsnapshot_init(es); - return es; -} - -static void _excsnapshot_free(_excsnapshot *); // forward - -static void -_excsnapshot_clear(_excsnapshot *es) -{ - _objsnapshot_clear(&es->es_object); - if (es->es_msg != NULL) { - _rawstring_free(es->es_msg); - es->es_msg = NULL; - } - if (es->es_cause != NULL) { - _excsnapshot_free(es->es_cause); - es->es_cause = NULL; - } - if (es->es_context != NULL) { - _excsnapshot_free(es->es_context); - es->es_context = NULL; - } - es->es_suppress_context = 0; - if (es->es_traceback != NULL) { - _tbsnapshot_free(es->es_traceback); - es->es_traceback = NULL; - } -} - -static void -_excsnapshot_free(_excsnapshot *es) -{ - _excsnapshot_clear(es); - PyMem_Free(es); -} - -#ifndef NDEBUG -static int -_excsnapshot_is_clear(_excsnapshot *es) -{ - return es->es_suppress_context == 0 - && es->es_cause == NULL - && es->es_context == NULL - && es->es_traceback == NULL - && es->es_msg == NULL - && _objsnapshot_is_clear(&es->es_object); -} -#endif - -static PyObject * -_excsnapshot_get_exc_naive(_excsnapshot *es) -{ - _rawstring buf; - const char *msg = NULL; - if (es->es_msg != NULL) { - msg = es->es_msg->data; - } else { - _objsnapshot_summarize(&es->es_object, &buf, NULL); - if (buf.size > 0) { - msg = buf.data; - } - } - - PyObject 
*exc = NULL; - // XXX Use _objsnapshot_resolve_naive()? - const char *modname = es->es_object.modname.size > 0 - ? es->es_object.modname.data - : NULL; - PyObject *exctype = _pyobj_get_class(modname, es->es_object.clsname.data); - if (exctype != NULL) { - exc = _pyexc_create(exctype, msg, NULL); - Py_DECREF(exctype); - if (exc != NULL) { - return exc; - } - PyErr_Clear(); - } else { - PyErr_Clear(); - } - exctype = PyExc_Exception; - return _pyexc_create(exctype, msg, NULL); -} - -static PyObject * -_excsnapshot_get_exc(_excsnapshot *es) -{ - assert(!_objsnapshot_is_clear(&es->es_object)); - - PyObject *exc = _objsnapshot_resolve(&es->es_object); - if (exc == NULL) { - // Fall back to resolving the object. - PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL; - PyErr_Fetch(&curtype, &curexc, &curtb); - - exc = _excsnapshot_get_exc_naive(es); - if (exc == NULL) { - PyErr_Restore(curtype, curexc, curtb); - return NULL; - } - } - // People can do some weird stuff... - if (!PyExceptionInstance_Check(exc)) { - // We got a bogus "exception". 
- Py_DECREF(exc); - PyErr_SetString(PyExc_TypeError, "expected exception"); - return NULL; - } - return exc; -} - -static void _excsnapshot_extract(_excsnapshot *, PyObject *); -static void -_excsnapshot_extract(_excsnapshot *es, PyObject *excobj) -{ - assert(_excsnapshot_is_clear(es)); - assert(PyExceptionInstance_Check(excobj)); - - _objsnapshot_extract(&es->es_object, excobj); - - es->es_msg = _objsnapshot_get_minimal_summary(&es->es_object, excobj); - if (es->es_msg == NULL) { - PyErr_Clear(); - } - - PyBaseExceptionObject *exc = (PyBaseExceptionObject *)excobj; - - if (exc->cause != NULL && exc->cause != Py_None) { - es->es_cause = _excsnapshot_new(); - _excsnapshot_extract(es->es_cause, exc->cause); - } - - if (exc->context != NULL && exc->context != Py_None) { - es->es_context = _excsnapshot_new(); - _excsnapshot_extract(es->es_context, exc->context); - } - - es->es_suppress_context = exc->suppress_context; - - PyObject *tb = PyException_GetTraceback(excobj); - if (PyErr_Occurred()) { - IGNORE_FAILURE("could not get traceback"); - } else if (tb == Py_None) { - Py_DECREF(tb); - tb = NULL; - } - if (tb != NULL) { - es->es_traceback = _tbsnapshot_new(); - if (_tbsnapshot_extract(es->es_traceback, - (PyTracebackObject *)tb) != 0) { - IGNORE_FAILURE("could not extract __traceback__"); - } - } + strcpy(copied, str); + return copied; } -static PyObject * -_excsnapshot_resolve(_excsnapshot *es) +static PyInterpreterState * +_get_current(void) { - PyObject *exc = _excsnapshot_get_exc(es); - if (exc == NULL) { - return NULL; - } - - if (es->es_traceback != NULL) { - PyObject *tb = _tbsnapshot_resolve(es->es_traceback); - if (tb == NULL) { - // The snapshot is still somewhat useful without this. - IGNORE_FAILURE("could not deserialize traceback"); - } else { - // This does not steal references. - PyException_SetTraceback(exc, tb); - Py_DECREF(tb); - } - } - // NULL means "not set". 
- - if (es->es_context != NULL) { - PyObject *context = _excsnapshot_resolve(es->es_context); - if (context == NULL) { - // The snapshot is still useful without this. - IGNORE_FAILURE("could not deserialize __context__"); - } else { - // This steals references but we have one to give. - PyException_SetContext(exc, context); - } - } - // NULL means "not set". - - if (es->es_cause != NULL) { - PyObject *cause = _excsnapshot_resolve(es->es_cause); - if (cause == NULL) { - // The snapshot is still useful without this. - IGNORE_FAILURE("could not deserialize __cause__"); - } else { - // This steals references, but we have one to give. - PyException_SetCause(exc, cause); - } - } - // NULL means "not set". - - ((PyBaseExceptionObject *)exc)->suppress_context = es->es_suppress_context; - - return exc; + // PyInterpreterState_Get() aborts if lookup fails, so don't need + // to check the result for NULL. + return PyInterpreterState_Get(); } /* data-sharing-specific code ***********************************************/ -/* shared "object" */ - struct _sharednsitem { - _rawstring name; + char *name; _PyCrossInterpreterData data; }; @@ -935,7 +44,8 @@ static void _sharednsitem_clear(struct _sharednsitem *); // forward static int _sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value) { - if (_rawstring_from_pyobj(&item->name, key) != 0) { + item->name = _copy_raw_string(key); + if (item->name == NULL) { return -1; } if (_PyObject_GetCrossInterpreterData(value, &item->data) != 0) { @@ -948,14 +58,17 @@ _sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value) static void _sharednsitem_clear(struct _sharednsitem *item) { - _rawstring_clear(&item->name); + if (item->name != NULL) { + PyMem_Free(item->name); + item->name = NULL; + } _PyCrossInterpreterData_Release(&item->data); } static int _sharednsitem_apply(struct _sharednsitem *item, PyObject *ns) { - PyObject *name = PyUnicode_FromString(item->name.data); + PyObject *name = 
PyUnicode_FromString(item->name); if (name == NULL) { return -1; } @@ -1046,121 +159,121 @@ _sharedns_apply(_sharedns *shared, PyObject *ns) return 0; } -/* shared exception */ - // Ultimately we'd like to preserve enough information about the // exception and traceback that we could re-constitute (or at least // simulate, a la traceback.TracebackException), and even chain, a copy // of the exception in the calling interpreter. typedef struct _sharedexception { - _excsnapshot snapshot; - _rawstring msg; + char *name; + char *msg; } _sharedexception; -static void -_sharedexception_init(_sharedexception *she) -{ - _excsnapshot_init(&she->snapshot); - _rawstring_init(&she->msg); -} - static _sharedexception * _sharedexception_new(void) { - _sharedexception *she = PyMem_NEW(_sharedexception, 1); - if (she == NULL) { + _sharedexception *err = PyMem_NEW(_sharedexception, 1); + if (err == NULL) { PyErr_NoMemory(); return NULL; } - _sharedexception_init(she); - return she; + err->name = NULL; + err->msg = NULL; + return err; } static void -_sharedexception_clear(_sharedexception *she) +_sharedexception_clear(_sharedexception *exc) { - _excsnapshot_clear(&she->snapshot); - _rawstring_clear(&she->msg); + if (exc->name != NULL) { + PyMem_Free(exc->name); + } + if (exc->msg != NULL) { + PyMem_Free(exc->msg); + } } static void -_sharedexception_free(_sharedexception *she) +_sharedexception_free(_sharedexception *exc) { - _sharedexception_clear(she); - PyMem_Free(she); + _sharedexception_clear(exc); + PyMem_Free(exc); } -#ifndef NDEBUG -static int -_sharedexception_is_clear(_sharedexception *she) +static _sharedexception * +_sharedexception_bind(PyObject *exctype, PyObject *exc, PyObject *tb) { - return 1 - && _excsnapshot_is_clear(&she->snapshot) - && _rawstring_is_clear(&she->msg); -} -#endif + assert(exctype != NULL); + char *failure = NULL; -static PyObject * -_sharedexception_get_cause(_sharedexception *sharedexc) -{ - // FYI, "cause" is already normalized. 
- PyObject *cause = _excsnapshot_resolve(&sharedexc->snapshot); - if (cause == NULL) { - if (PyErr_Occurred()) { - IGNORE_FAILURE("could not deserialize exc snapshot"); - } - return NULL; + _sharedexception *err = _sharedexception_new(); + if (err == NULL) { + goto finally; } - // XXX Ensure "cause" has a traceback. - return cause; -} -static void -_sharedexception_extract(_sharedexception *she, PyObject *exc) -{ - assert(_sharedexception_is_clear(she)); - assert(exc != NULL); + PyObject *name = PyUnicode_FromFormat("%S", exctype); + if (name == NULL) { + failure = "unable to format exception type name"; + goto finally; + } + err->name = _copy_raw_string(name); + Py_DECREF(name); + if (err->name == NULL) { + if (PyErr_ExceptionMatches(PyExc_MemoryError)) { + failure = "out of memory copying exception type name"; + } else { + failure = "unable to encode and copy exception type name"; + } + goto finally; + } - _excsnapshot_extract(&she->snapshot, exc); + if (exc != NULL) { + PyObject *msg = PyUnicode_FromFormat("%S", exc); + if (msg == NULL) { + failure = "unable to format exception message"; + goto finally; + } + err->msg = _copy_raw_string(msg); + Py_DECREF(msg); + if (err->msg == NULL) { + if (PyErr_ExceptionMatches(PyExc_MemoryError)) { + failure = "out of memory copying exception message"; + } else { + failure = "unable to encode and copy exception message"; + } + goto finally; + } + } - // Compose the message. 
- const char *msg = NULL; - PyObject *msgobj = PyUnicode_FromFormat("%S", exc); - if (msgobj == NULL) { - IGNORE_FAILURE("unable to format exception message"); - } else { - msg = PyUnicode_AsUTF8(msgobj); - if (PyErr_Occurred()) { - PyErr_Clear(); +finally: + if (failure != NULL) { + PyErr_Clear(); + if (err->name != NULL) { + PyMem_Free(err->name); + err->name = NULL; } + err->msg = failure; } - _objsnapshot_summarize(&she->snapshot.es_object, &she->msg, msg); - Py_XDECREF(msgobj); + return err; } -static PyObject * -_sharedexception_resolve(_sharedexception *sharedexc, PyObject *wrapperclass) +static void +_sharedexception_apply(_sharedexception *exc, PyObject *wrapperclass) { - assert(!PyErr_Occurred()); - - // Get the exception object (already normalized). - PyObject *exc = _pyexc_create(wrapperclass, sharedexc->msg.data, NULL); - assert(exc != NULL); - - // Set __cause__, is possible. - PyObject *cause = _sharedexception_get_cause(sharedexc); - if (cause != NULL) { - // Set __context__. - Py_INCREF(cause); // PyException_SetContext() steals a reference. - PyException_SetContext(exc, cause); - - // Set __cause__. - Py_INCREF(cause); // PyException_SetCause() steals a reference. - PyException_SetCause(exc, cause); + if (exc->name != NULL) { + if (exc->msg != NULL) { + PyErr_Format(wrapperclass, "%s: %s", exc->name, exc->msg); + } + else { + PyErr_SetString(wrapperclass, exc->name); + } + } + else if (exc->msg != NULL) { + PyErr_SetString(wrapperclass, exc->msg); + } + else { + PyErr_SetNone(wrapperclass); } - - return exc; } @@ -2756,9 +1869,11 @@ _ensure_not_running(PyInterpreterState *interp) static int _run_script(PyInterpreterState *interp, const char *codestr, - _sharedns *shared, _sharedexception **pexc) + _sharedns *shared, _sharedexception **exc) { - assert(!PyErr_Occurred()); // ...in the called interpreter. 
+ PyObject *exctype = NULL; + PyObject *excval = NULL; + PyObject *tb = NULL; PyObject *main_mod = _PyInterpreterState_GetMainModule(interp); if (main_mod == NULL) { @@ -2789,38 +1904,25 @@ _run_script(PyInterpreterState *interp, const char *codestr, Py_DECREF(result); // We throw away the result. } - *pexc = NULL; + *exc = NULL; return 0; - PyObject *exctype = NULL, *exc = NULL, *tb = NULL; error: - PyErr_Fetch(&exctype, &exc, &tb); - - // First normalize the exception. - PyErr_NormalizeException(&exctype, &exc, &tb); - assert(PyExceptionInstance_Check(exc)); - if (tb != NULL) { - PyException_SetTraceback(exc, tb); - } - - // Behave as though the exception was caught in this thread. - PyErr_SetExcInfo(exctype, exc, tb); // Like entering "except" block. + PyErr_Fetch(&exctype, &excval, &tb); - // Serialize the exception. - _sharedexception *sharedexc = _sharedexception_new(); + _sharedexception *sharedexc = _sharedexception_bind(exctype, excval, tb); + Py_XDECREF(exctype); + Py_XDECREF(excval); + Py_XDECREF(tb); if (sharedexc == NULL) { - IGNORE_FAILURE("script raised an uncaught exception"); - } else { - _sharedexception_extract(sharedexc, exc); + fprintf(stderr, "RunFailedError: script raised an uncaught exception"); + PyErr_Clear(); + sharedexc = NULL; + } + else { assert(!PyErr_Occurred()); } - - // Clear the exception. - PyErr_SetExcInfo(NULL, NULL, NULL); // Like leaving "except" block. - PyErr_Clear(); // Do not re-raise. - - // "Return" the serialized exception. - *pexc = sharedexc; + *exc = sharedexc; return -1; } @@ -2828,8 +1930,6 @@ static int _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, PyObject *shareables) { - assert(!PyErr_Occurred()); // ...in the calling interpreter. - if (_ensure_not_running(interp) < 0) { return -1; } @@ -2863,8 +1963,8 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, } // Run the script. 
- _sharedexception *sharedexc = NULL; - int result = _run_script(interp, codestr, shared, &sharedexc); + _sharedexception *exc = NULL; + int result = _run_script(interp, codestr, shared, &exc); // Switch back. if (save_tstate != NULL) { @@ -2873,14 +1973,9 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, #endif // Propagate any exception out to the caller. - if (sharedexc != NULL) { - assert(!PyErr_Occurred()); - PyObject *exc = _sharedexception_resolve(sharedexc, RunFailedError); - // XXX This is not safe once interpreters no longer share allocators. - _sharedexception_free(sharedexc); - PyObject *exctype = (PyObject *)Py_TYPE(exc); - Py_INCREF(exctype); // PyErr_Restore() steals a reference. - PyErr_Restore(exctype, exc, PyException_GetTraceback(exc)); + if (exc != NULL) { + _sharedexception_apply(exc, RunFailedError); + _sharedexception_free(exc); } else if (result != 0) { // We were unable to allocate a shared exception. From a482dc500b6ec4889f6a126ba08cbad6c11e37bc Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 14 May 2020 21:55:47 +0200 Subject: [PATCH 098/115] bpo-40602: Write unit tests for _Py_hashtable_t (GH-20091) Cleanup also hashtable.c. 
Rename _Py_hashtable_t members: * Rename entries to nentries * Rename num_buckets to nbuckets --- Include/internal/pycore_hashtable.h | 23 ++-- Modules/_testinternalcapi.c | 88 ++++++++++++++++ Python/hashtable.c | 158 +++++++++------------------- Python/marshal.c | 2 +- 4 files changed, 151 insertions(+), 120 deletions(-) diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 2990f9e0c1cc6f..18757abc28c195 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -48,18 +48,18 @@ typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t * const void *key); typedef struct { - /* allocate a memory block */ + // Allocate a memory block void* (*malloc) (size_t size); - /* release a memory block */ + // Release a memory block void (*free) (void *ptr); } _Py_hashtable_allocator_t; /* _Py_hashtable: table */ struct _Py_hashtable_t { - size_t num_buckets; - size_t entries; /* Total number of entries in the table. */ + size_t nentries; // Total number of entries in the table + size_t nbuckets; _Py_slist_t *buckets; _Py_hashtable_get_entry_func get_entry_func; @@ -70,10 +70,10 @@ struct _Py_hashtable_t { _Py_hashtable_allocator_t alloc; }; -/* hash a pointer (void*) */ +/* Hash a pointer (void*) */ PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr(const void *key); -/* comparison using memcmp() */ +/* Comparison using memcmp() */ PyAPI_FUNC(int) _Py_hashtable_compare_direct( const void *key1, const void *key2); @@ -129,13 +129,14 @@ _Py_hashtable_get_entry(_Py_hashtable_t *ht, const void *key) Use _Py_hashtable_get_entry() to distinguish entry value equal to NULL and entry not found. */ -extern void *_Py_hashtable_get(_Py_hashtable_t *ht, const void *key); +PyAPI_FUNC(void*) _Py_hashtable_get(_Py_hashtable_t *ht, const void *key); -// Remove a key and its associated value without calling key and value destroy -// functions. -// Return the removed value if the key was found. 
-// Return NULL if the key was not found. +/* Remove a key and its associated value without calling key and value destroy + functions. + + Return the removed value if the key was found. + Return NULL if the key was not found. */ PyAPI_FUNC(void*) _Py_hashtable_steal( _Py_hashtable_t *ht, const void *key); diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 1b7563cb20fc5d..3ae387d945d761 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -14,6 +14,7 @@ #include "Python.h" #include "pycore_byteswap.h" // _Py_bswap32() #include "pycore_initconfig.h" // _Py_GetConfigsAsDict() +#include "pycore_hashtable.h" // _Py_hashtable_new() #include "pycore_gc.h" // PyGC_Head @@ -62,10 +63,97 @@ test_bswap(PyObject *self, PyObject *Py_UNUSED(args)) } +#define TO_PTR(ch) ((void*)(uintptr_t)ch) +#define FROM_PTR(ptr) ((uintptr_t)ptr) +#define VALUE(key) (1 + ((int)(key) - 'a')) + +static Py_uhash_t +hash_char(const void *key) +{ + char ch = (char)FROM_PTR(key); + return ch; +} + + +static int +hashtable_cb(_Py_hashtable_t *table, + const void *key_ptr, const void *value_ptr, + void *user_data) +{ + int *count = (int *)user_data; + char key = (char)FROM_PTR(key_ptr); + int value = (int)FROM_PTR(value_ptr); + assert(value == VALUE(key)); + *count += 1; + return 0; +} + + +static PyObject* +test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) +{ + _Py_hashtable_t *table = _Py_hashtable_new(hash_char, + _Py_hashtable_compare_direct); + if (table == NULL) { + return PyErr_NoMemory(); + } + + // Test _Py_hashtable_set() + char key; + for (key='a'; key <= 'z'; key++) { + int value = VALUE(key); + if (_Py_hashtable_set(table, TO_PTR(key), TO_PTR(value)) < 0) { + _Py_hashtable_destroy(table); + return PyErr_NoMemory(); + } + } + assert(table->nentries == 26); + assert(table->nbuckets > table->nentries); + + // Test _Py_hashtable_get_entry() + for (key='a'; key <= 'z'; key++) { + _Py_hashtable_entry_t *entry = 
_Py_hashtable_get_entry(table, TO_PTR(key)); + assert(entry != NULL); + assert(entry->key = TO_PTR(key)); + assert(entry->value = TO_PTR(VALUE(key))); + } + + // Test _Py_hashtable_get() + for (key='a'; key <= 'z'; key++) { + void *value_ptr = _Py_hashtable_get(table, TO_PTR(key)); + int value = (int)FROM_PTR(value_ptr); + assert(value == VALUE(key)); + } + + // Test _Py_hashtable_steal() + key = 'p'; + void *value_ptr = _Py_hashtable_steal(table, TO_PTR(key)); + int value = (int)FROM_PTR(value_ptr); + assert(value == VALUE(key)); + + assert(table->nentries == 25); + + // Test _Py_hashtable_foreach() + int count = 0; + int res = _Py_hashtable_foreach(table, hashtable_cb, &count); + assert(res == 0); + assert(count == 25); + + // Test _Py_hashtable_clear() + _Py_hashtable_clear(table); + assert(table->nentries == 0); + assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL); + + _Py_hashtable_destroy(table); + Py_RETURN_NONE; +} + + static PyMethodDef TestMethods[] = { {"get_configs", get_configs, METH_NOARGS}, {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, {"test_bswap", test_bswap, METH_NOARGS}, + {"test_hashtable", test_hashtable, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; diff --git a/Python/hashtable.c b/Python/hashtable.c index d1467ad94ed55c..45c52859ac2d6c 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -119,66 +119,20 @@ round_size(size_t s) size_t _Py_hashtable_size(const _Py_hashtable_t *ht) { - size_t size; - - size = sizeof(_Py_hashtable_t); - + size_t size = sizeof(_Py_hashtable_t); /* buckets */ - size += ht->num_buckets * sizeof(_Py_hashtable_entry_t *); - + size += ht->nbuckets * sizeof(_Py_hashtable_entry_t *); /* entries */ - size += ht->entries * sizeof(_Py_hashtable_entry_t); - + size += ht->nentries * sizeof(_Py_hashtable_entry_t); return size; } -#ifdef Py_DEBUG -void -_Py_hashtable_print_stats(_Py_hashtable_t *ht) -{ - size_t size; - size_t chain_len, max_chain_len, total_chain_len, nchains; - _Py_hashtable_entry_t 
*entry; - size_t hv; - double load; - - size = _Py_hashtable_size(ht); - - load = (double)ht->entries / ht->num_buckets; - - max_chain_len = 0; - total_chain_len = 0; - nchains = 0; - for (hv = 0; hv < ht->num_buckets; hv++) { - entry = TABLE_HEAD(ht, hv); - if (entry != NULL) { - chain_len = 0; - for (; entry; entry = ENTRY_NEXT(entry)) { - chain_len++; - } - if (chain_len > max_chain_len) - max_chain_len = chain_len; - total_chain_len += chain_len; - nchains++; - } - } - printf("hash table %p: entries=%" - PY_FORMAT_SIZE_T "u/%" PY_FORMAT_SIZE_T "u (%.0f%%), ", - (void *)ht, ht->entries, ht->num_buckets, load * 100.0); - if (nchains) - printf("avg_chain_len=%.1f, ", (double)total_chain_len / nchains); - printf("max_chain_len=%" PY_FORMAT_SIZE_T "u, %" PY_FORMAT_SIZE_T "u KiB\n", - max_chain_len, size / 1024); -} -#endif - - _Py_hashtable_entry_t * _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) { Py_uhash_t key_hash = ht->hash_func(key); - size_t index = key_hash & (ht->num_buckets - 1); + size_t index = key_hash & (ht->nbuckets - 1); _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); while (1) { if (entry == NULL) { @@ -200,7 +154,7 @@ static _Py_hashtable_entry_t * _Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) { Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); - size_t index = key_hash & (ht->num_buckets - 1); + size_t index = key_hash & (ht->nbuckets - 1); _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); while (1) { if (entry == NULL) { @@ -220,7 +174,7 @@ void* _Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) { Py_uhash_t key_hash = ht->hash_func(key); - size_t index = key_hash & (ht->num_buckets - 1); + size_t index = key_hash & (ht->nbuckets - 1); _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, index); _Py_hashtable_entry_t *previous = NULL; @@ -238,12 +192,12 @@ _Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) _Py_slist_remove(&ht->buckets[index], (_Py_slist_item_t 
*)previous, (_Py_slist_item_t *)entry); - ht->entries--; + ht->nentries--; void *value = entry->value; ht->alloc.free(entry); - if ((float)ht->entries / (float)ht->num_buckets < HASHTABLE_LOW) { + if ((float)ht->nentries / (float)ht->nbuckets < HASHTABLE_LOW) { hashtable_rehash(ht); } return value; @@ -263,8 +217,6 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value) assert(entry == NULL); #endif - Py_uhash_t key_hash = ht->hash_func(key); - size_t index = key_hash & (ht->num_buckets - 1); entry = ht->alloc.malloc(sizeof(_Py_hashtable_entry_t)); if (entry == NULL) { @@ -272,15 +224,17 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value) return -1; } - entry->key_hash = key_hash; + entry->key_hash = ht->hash_func(key); entry->key = (void *)key; entry->value = value; + size_t index = entry->key_hash & (ht->nbuckets - 1); _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); - ht->entries++; + ht->nentries++; - if ((float)ht->entries / (float)ht->num_buckets > HASHTABLE_HIGH) + if ((float)ht->nentries / (float)ht->nbuckets > HASHTABLE_HIGH) { hashtable_rehash(ht); + } return 0; } @@ -303,14 +257,14 @@ _Py_hashtable_foreach(_Py_hashtable_t *ht, _Py_hashtable_foreach_func func, void *user_data) { - _Py_hashtable_entry_t *entry; - size_t hv; - - for (hv = 0; hv < ht->num_buckets; hv++) { - for (entry = TABLE_HEAD(ht, hv); entry; entry = ENTRY_NEXT(entry)) { + for (size_t hv = 0; hv < ht->nbuckets; hv++) { + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, hv); + while (entry != NULL) { int res = func(ht, entry->key, entry->value, user_data); - if (res) + if (res) { return res; + } + entry = ENTRY_NEXT(entry); } } return 0; @@ -320,44 +274,35 @@ _Py_hashtable_foreach(_Py_hashtable_t *ht, static void hashtable_rehash(_Py_hashtable_t *ht) { - size_t buckets_size, new_size, bucket; - _Py_slist_t *old_buckets = NULL; - size_t old_num_buckets; - - new_size = round_size((size_t)(ht->entries * HASHTABLE_REHASH_FACTOR)); - if 
(new_size == ht->num_buckets) + size_t new_size = round_size((size_t)(ht->nentries * HASHTABLE_REHASH_FACTOR)); + if (new_size == ht->nbuckets) { return; + } - old_num_buckets = ht->num_buckets; - - buckets_size = new_size * sizeof(ht->buckets[0]); - old_buckets = ht->buckets; - ht->buckets = ht->alloc.malloc(buckets_size); - if (ht->buckets == NULL) { - /* cancel rehash on memory allocation failure */ - ht->buckets = old_buckets ; + size_t buckets_size = new_size * sizeof(ht->buckets[0]); + _Py_slist_t *new_buckets = ht->alloc.malloc(buckets_size); + if (new_buckets == NULL) { /* memory allocation failed */ return; } - memset(ht->buckets, 0, buckets_size); - - ht->num_buckets = new_size; - - for (bucket = 0; bucket < old_num_buckets; bucket++) { - _Py_hashtable_entry_t *entry, *next; - for (entry = BUCKETS_HEAD(old_buckets[bucket]); entry != NULL; entry = next) { - size_t entry_index; - + memset(new_buckets, 0, buckets_size); + for (size_t bucket = 0; bucket < ht->nbuckets; bucket++) { + _Py_hashtable_entry_t *entry = BUCKETS_HEAD(ht->buckets[bucket]); + while (entry != NULL) { assert(ht->hash_func(entry->key) == entry->key_hash); - next = ENTRY_NEXT(entry); - entry_index = entry->key_hash & (new_size - 1); + _Py_hashtable_entry_t *next = ENTRY_NEXT(entry); + size_t entry_index = entry->key_hash & (new_size - 1); + + _Py_slist_prepend(&new_buckets[entry_index], (_Py_slist_item_t*)entry); - _Py_slist_prepend(&ht->buckets[entry_index], (_Py_slist_item_t*)entry); + entry = next; } } - ht->alloc.free(old_buckets); + ht->alloc.free(ht->buckets); + ht->nbuckets = new_size; + ht->buckets = new_buckets; } @@ -368,10 +313,7 @@ _Py_hashtable_new_full(_Py_hashtable_hash_func hash_func, _Py_hashtable_destroy_func value_destroy_func, _Py_hashtable_allocator_t *allocator) { - _Py_hashtable_t *ht; - size_t buckets_size; _Py_hashtable_allocator_t alloc; - if (allocator == NULL) { alloc.malloc = PyMem_Malloc; alloc.free = PyMem_Free; @@ -380,14 +322,15 @@ 
_Py_hashtable_new_full(_Py_hashtable_hash_func hash_func, alloc = *allocator; } - ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); - if (ht == NULL) + _Py_hashtable_t *ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); + if (ht == NULL) { return ht; + } - ht->num_buckets = HASHTABLE_MIN_SIZE; - ht->entries = 0; + ht->nbuckets = HASHTABLE_MIN_SIZE; + ht->nentries = 0; - buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); + size_t buckets_size = ht->nbuckets * sizeof(ht->buckets[0]); ht->buckets = alloc.malloc(buckets_size); if (ht->buckets == NULL) { alloc.free(ht); @@ -435,17 +378,16 @@ _Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry) void _Py_hashtable_clear(_Py_hashtable_t *ht) { - _Py_hashtable_entry_t *entry, *next; - size_t i; - - for (i=0; i < ht->num_buckets; i++) { - for (entry = TABLE_HEAD(ht, i); entry != NULL; entry = next) { - next = ENTRY_NEXT(entry); + for (size_t i=0; i < ht->nbuckets; i++) { + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i); + while (entry != NULL) { + _Py_hashtable_entry_t *next = ENTRY_NEXT(entry); _Py_hashtable_destroy_entry(ht, entry); + entry = next; } _Py_slist_init(&ht->buckets[i]); } - ht->entries = 0; + ht->nentries = 0; hashtable_rehash(ht); } @@ -453,7 +395,7 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) void _Py_hashtable_destroy(_Py_hashtable_t *ht) { - for (size_t i = 0; i < ht->num_buckets; i++) { + for (size_t i = 0; i < ht->nbuckets; i++) { _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i); while (entry) { _Py_hashtable_entry_t *entry_next = ENTRY_NEXT(entry); diff --git a/Python/marshal.c b/Python/marshal.c index b096ff89322209..a0f6b9812601be 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -312,7 +312,7 @@ w_ref(PyObject *v, char *flag, WFILE *p) w_long(w, p); return 1; } else { - size_t s = p->hashtable->entries; + size_t s = p->hashtable->nentries; /* we don't support long indices */ if (s >= 0x7fffffff) { PyErr_SetString(PyExc_ValueError, "too 
many objects"); From bcc30360951a303aa72b0502b77aad2c5f09f30d Mon Sep 17 00:00:00 2001 From: Pablo Galindo Date: Thu, 14 May 2020 21:11:48 +0100 Subject: [PATCH 099/115] bpo-40619: Correctly handle error lines in programs without file mode (GH-20090) --- Lib/test/test_exceptions.py | 2 ++ Parser/pegen/pegen.c | 30 +++++------------------------- 2 files changed, 7 insertions(+), 25 deletions(-) diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index b689ec7aed18de..efd77fdbaabe1b 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -228,6 +228,8 @@ def bar(): def baz(): '''quux''' """, 9, 20) + check("pass\npass\npass\n(1+)\npass\npass\npass", 4, 4) + check("(1+)", 1, 4) # Errors thrown by symtable.c check('x = [(yield i) for i in range(3)]', 1, 5) diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index 083088bd9657bd..8b79a7364758e4 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -300,30 +300,6 @@ raise_tokenizer_init_error(PyObject *filename) Py_XDECREF(tuple); } -static inline PyObject * -get_error_line(char *buffer, int is_file) -{ - const char *newline; - if (is_file) { - newline = strrchr(buffer, '\n'); - } else { - newline = strchr(buffer, '\n'); - } - - if (is_file) { - while (newline > buffer && newline[-1] == '\n') { - --newline; - } - } - - if (newline) { - return PyUnicode_DecodeUTF8(buffer, newline - buffer, "replace"); - } - else { - return PyUnicode_DecodeUTF8(buffer, strlen(buffer), "replace"); - } -} - static int tokenizer_error(Parser *p) { @@ -422,7 +398,11 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, } if (!error_line) { - error_line = get_error_line(p->tok->buf, p->start_rule == Py_file_input); + Py_ssize_t size = p->tok->inp - p->tok->buf; + if (size && p->tok->buf[size-1] == '\n') { + size--; + } + error_line = PyUnicode_DecodeUTF8(p->tok->buf, size, "replace"); if (!error_line) { goto error; } From ce21cfca7bb2d18921bc4ac27cb064726996c519 Mon Sep 17 
00:00:00 2001 From: Lysandros Nikolaou Date: Thu, 14 May 2020 23:13:50 +0300 Subject: [PATCH 100/115] bpo-40618: Disallow invalid targets in augassign and except clauses (GH-20083) This commit fixes the new parser to disallow invalid targets in the following scenarios: - Augmented assignments must only accept a single target (Name, Attribute or Subscript), but no tuples or lists. - `except` clauses should only accept a single `Name` as a target. Co-authored-by: Pablo Galindo --- Grammar/python.gram | 17 +++++---- Lib/test/test_grammar.py | 5 ++- Lib/test/test_peg_parser.py | 25 ++++++++++++++ Lib/test/test_syntax.py | 12 +++++++ Parser/pegen/parse.c | 69 ++++++++++++++++++------------------- 5 files changed, 82 insertions(+), 46 deletions(-) diff --git a/Grammar/python.gram b/Grammar/python.gram index 84c89330e3ee9d..9087c7aa718b17 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -89,12 +89,12 @@ assignment[stmt_ty]: "Variable annotation syntax is", _Py_AnnAssign(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, c, 1, EXTRA) ) } - | a=('(' b=inside_paren_ann_assign_target ')' { b } - | ann_assign_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] { + | a=('(' b=single_target ')' { b } + | single_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] { CHECK_VERSION(6, "Variable annotations syntax is", _Py_AnnAssign(a, b, c, 0, EXTRA)) } | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) tc=[TYPE_COMMENT] { _Py_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | a=target b=augassign c=(yield_expr | star_expressions) { + | a=single_target b=augassign c=(yield_expr | star_expressions) { _Py_AugAssign(a, b->kind, c, EXTRA) } | invalid_assignment @@ -185,7 +185,7 @@ try_stmt[stmt_ty]: | 'try' ':' b=block f=finally_block { _Py_Try(b, NULL, NULL, f, EXTRA) } | 'try' ':' b=block ex=except_block+ el=[else_block] f=[finally_block] { _Py_Try(b, ex, el, f, EXTRA) } except_block[excepthandler_ty]: - | 
'except' e=expression t=['as' z=target { z }] ':' b=block { + | 'except' e=expression t=['as' z=NAME { z }] ':' b=block { _Py_ExceptHandler(e, (t) ? ((expr_ty) t)->v.Name.id : NULL, b, EXTRA) } | 'except' ':' b=block { _Py_ExceptHandler(NULL, NULL, b, EXTRA) } finally_block[asdl_seq*]: 'finally' ':' a=block { a } @@ -573,12 +573,11 @@ star_atom[expr_ty]: | '(' a=[star_targets_seq] ')' { _Py_Tuple(a, Store, EXTRA) } | '[' a=[star_targets_seq] ']' { _Py_List(a, Store, EXTRA) } -inside_paren_ann_assign_target[expr_ty]: - | ann_assign_subscript_attribute_target +single_target[expr_ty]: + | single_subscript_attribute_target | a=NAME { _PyPegen_set_expr_context(p, a, Store) } - | '(' a=inside_paren_ann_assign_target ')' { a } - -ann_assign_subscript_attribute_target[expr_ty]: + | '(' a=single_target ')' { a } +single_subscript_attribute_target[expr_ty]: | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) } | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) } diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index 02ba8a8b1579a5..e1a402e2b463b1 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -1279,7 +1279,7 @@ def __getitem__(self, i): def test_try(self): ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] ### | 'try' ':' suite 'finally' ':' suite - ### except_clause: 'except' [expr ['as' expr]] + ### except_clause: 'except' [expr ['as' NAME]] try: 1/0 except ZeroDivisionError: @@ -1297,6 +1297,9 @@ def test_try(self): except (EOFError, TypeError, ZeroDivisionError) as msg: pass try: pass finally: pass + with self.assertRaises(SyntaxError): + compile("try:\n pass\nexcept Exception as a.b:\n pass", "?", "exec") + compile("try:\n pass\nexcept Exception as a[b]:\n pass", "?", "exec") def test_suite(self): # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index 
df2d46d8827f0e..71e071940de2f4 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -35,6 +35,9 @@ ('attribute_simple', 'a.b'), ('attributes_subscript', 'a.b[0]'), ('augmented_assignment', 'x += 42'), + ('augmented_assignment_attribute', 'a.b.c += 42'), + ('augmented_assignment_paren', '(x) += 42'), + ('augmented_assignment_paren_subscript', '(x[0]) -= 42'), ('binop_add', '1 + 1'), ('binop_add_multiple', '1 + 1 + 1 + 1'), ('binop_all', '1 + 2 * 5 + 3 ** 2 - -3'), @@ -547,6 +550,11 @@ def f(*a, b): with a as (x, y): pass '''), + ('with_list_target', + ''' + with a as [x, y]: + pass + '''), ('yield', 'yield'), ('yield_expr', 'yield a'), ('yield_from', 'yield from a'), @@ -560,6 +568,9 @@ def f(*a, b): ("annotation_tuple", "(a,): int"), ("annotation_tuple_without_paren", "a,: int"), ("assignment_keyword", "a = if"), + ("augmented_assignment_list", "[a, b] += 1"), + ("augmented_assignment_tuple", "a, b += 1"), + ("augmented_assignment_tuple_paren", "(a, b) += (1, 2)"), ("comprehension_lambda", "(a for a in lambda: b)"), ("comprehension_else", "(a for a in b if c else d"), ("del_call", "del a()"), @@ -589,6 +600,20 @@ def f(): a """), ("not_terminated_string", "a = 'example"), + ("try_except_attribute_target", + """ + try: + pass + except Exception as a.b: + pass + """), + ("try_except_subscript_target", + """ + try: + pass + except Exception as a[0]: + pass + """), ] FAIL_SPECIALIZED_MESSAGE_CASES = [ diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 06636ae8a149a4..a3a101534628a2 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -129,6 +129,18 @@ Traceback (most recent call last): SyntaxError: cannot assign to conditional expression +>>> a, b += 1, 2 +Traceback (most recent call last): +SyntaxError: invalid syntax + +>>> (a, b) += 1, 2 +Traceback (most recent call last): +SyntaxError: cannot assign to tuple + +>>> [a, b] += 1, 2 +Traceback (most recent call last): +SyntaxError: cannot assign to list + 
From compiler_complex_args(): >>> def f(None=1): diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index b1b248187ea3ed..851d17226d162f 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -199,8 +199,8 @@ static KeywordToken *reserved_keywords[] = { #define star_targets_seq_type 1128 #define star_target_type 1129 #define star_atom_type 1130 -#define inside_paren_ann_assign_target_type 1131 -#define ann_assign_subscript_attribute_target_type 1132 +#define single_target_type 1131 +#define single_subscript_attribute_target_type 1132 #define del_targets_type 1133 #define del_target_type 1134 #define del_t_atom_type 1135 @@ -501,8 +501,8 @@ static expr_ty star_targets_rule(Parser *p); static asdl_seq* star_targets_seq_rule(Parser *p); static expr_ty star_target_rule(Parser *p); static expr_ty star_atom_rule(Parser *p); -static expr_ty inside_paren_ann_assign_target_rule(Parser *p); -static expr_ty ann_assign_subscript_attribute_target_rule(Parser *p); +static expr_ty single_target_rule(Parser *p); +static expr_ty single_subscript_attribute_target_rule(Parser *p); static asdl_seq* del_targets_rule(Parser *p); static expr_ty del_target_rule(Parser *p); static expr_ty del_t_atom_rule(Parser *p); @@ -1590,9 +1590,9 @@ compound_stmt_rule(Parser *p) // assignment: // | NAME ':' expression ['=' annotated_rhs] -// | ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs] +// | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] // | ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? 
-// | target augassign (yield_expr | star_expressions) +// | single_target augassign (yield_expr | star_expressions) // | invalid_assignment static stmt_ty assignment_rule(Parser *p) @@ -1642,13 +1642,13 @@ assignment_rule(Parser *p) } p->mark = _mark; } - { // ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs] + { // ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] Token * _literal; void *a; expr_ty b; void *c; if ( - (a = _tmp_20_rule(p)) // '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target + (a = _tmp_20_rule(p)) // '(' single_target ')' | single_subscript_attribute_target && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -1703,12 +1703,12 @@ assignment_rule(Parser *p) } p->mark = _mark; } - { // target augassign (yield_expr | star_expressions) + { // single_target augassign (yield_expr | star_expressions) expr_ty a; AugOperator* b; void *c; if ( - (a = target_rule(p)) // target + (a = single_target_rule(p)) // single_target && (b = augassign_rule(p)) // augassign && @@ -3350,7 +3350,7 @@ try_stmt_rule(Parser *p) return _res; } -// except_block: 'except' expression ['as' target] ':' block | 'except' ':' block +// except_block: 'except' expression ['as' NAME] ':' block | 'except' ':' block static excepthandler_ty except_block_rule(Parser *p) { @@ -3367,7 +3367,7 @@ except_block_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'except' expression ['as' target] ':' block + { // 'except' expression ['as' NAME] ':' block Token * _keyword; Token * _literal; asdl_seq* b; @@ -3378,7 +3378,7 @@ except_block_rule(Parser *p) && (e = expression_rule(p)) // expression && - (t = _tmp_48_rule(p), 1) // ['as' target] + (t = _tmp_48_rule(p), 1) // ['as' NAME] && (_literal = 
_PyPegen_expect_token(p, 11)) // token=':' && @@ -9605,25 +9605,22 @@ star_atom_rule(Parser *p) return _res; } -// inside_paren_ann_assign_target: -// | ann_assign_subscript_attribute_target -// | NAME -// | '(' inside_paren_ann_assign_target ')' +// single_target: single_subscript_attribute_target | NAME | '(' single_target ')' static expr_ty -inside_paren_ann_assign_target_rule(Parser *p) +single_target_rule(Parser *p) { if (p->error_indicator) { return NULL; } expr_ty _res = NULL; int _mark = p->mark; - { // ann_assign_subscript_attribute_target - expr_ty ann_assign_subscript_attribute_target_var; + { // single_subscript_attribute_target + expr_ty single_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target ) { - _res = ann_assign_subscript_attribute_target_var; + _res = single_subscript_attribute_target_var; goto done; } p->mark = _mark; @@ -9643,14 +9640,14 @@ inside_paren_ann_assign_target_rule(Parser *p) } p->mark = _mark; } - { // '(' inside_paren_ann_assign_target ')' + { // '(' single_target ')' Token * _literal; Token * _literal_1; expr_ty a; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target + (a = single_target_rule(p)) // single_target && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) @@ -9669,11 +9666,11 @@ inside_paren_ann_assign_target_rule(Parser *p) return _res; } -// ann_assign_subscript_attribute_target: +// single_subscript_attribute_target: // | t_primary '.' 
NAME !t_lookahead // | t_primary '[' slices ']' !t_lookahead static expr_ty -ann_assign_subscript_attribute_target_rule(Parser *p) +single_subscript_attribute_target_rule(Parser *p) { if (p->error_indicator) { return NULL; @@ -11907,7 +11904,7 @@ _tmp_19_rule(Parser *p) return _res; } -// _tmp_20: '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target +// _tmp_20: '(' single_target ')' | single_subscript_attribute_target static void * _tmp_20_rule(Parser *p) { @@ -11916,14 +11913,14 @@ _tmp_20_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // '(' inside_paren_ann_assign_target ')' + { // '(' single_target ')' Token * _literal; Token * _literal_1; expr_ty b; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target + (b = single_target_rule(p)) // single_target && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) @@ -11937,13 +11934,13 @@ _tmp_20_rule(Parser *p) } p->mark = _mark; } - { // ann_assign_subscript_attribute_target - expr_ty ann_assign_subscript_attribute_target_var; + { // single_subscript_attribute_target + expr_ty single_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target ) { - _res = ann_assign_subscript_attribute_target_var; + _res = single_subscript_attribute_target_var; goto done; } p->mark = _mark; @@ -13073,7 +13070,7 @@ _loop1_47_rule(Parser *p) return _seq; } -// _tmp_48: 'as' target +// _tmp_48: 'as' NAME static void * _tmp_48_rule(Parser *p) { @@ -13082,13 +13079,13 @@ _tmp_48_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // 'as' target + { // 'as' NAME Token * _keyword; expr_ty z; if ( (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = 
target_rule(p)) // target + (z = _PyPegen_name_token(p)) // NAME ) { _res = z; From d2dc827d16479d99927a6923a0347199d7c694fb Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 14 May 2020 22:44:32 +0200 Subject: [PATCH 101/115] bpo-40602: _Py_hashtable_set() reports rehash failure (GH-20077) If _Py_hashtable_set() fails to grow the hash table (rehash), it now fails rather than ignoring the error. --- Modules/_testinternalcapi.c | 14 +++++++++----- Python/hashtable.c | 26 +++++++++++++++++--------- 2 files changed, 26 insertions(+), 14 deletions(-) diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 3ae387d945d761..5f217dcb8978e2 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -98,6 +98,11 @@ test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) return PyErr_NoMemory(); } + // Using an newly allocated table must not crash + assert(table->nentries == 0); + assert(table->nbuckets > 0); + assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL); + // Test _Py_hashtable_set() char key; for (key='a'; key <= 'z'; key++) { @@ -121,17 +126,15 @@ test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) // Test _Py_hashtable_get() for (key='a'; key <= 'z'; key++) { void *value_ptr = _Py_hashtable_get(table, TO_PTR(key)); - int value = (int)FROM_PTR(value_ptr); - assert(value == VALUE(key)); + assert((int)FROM_PTR(value_ptr) == VALUE(key)); } // Test _Py_hashtable_steal() key = 'p'; void *value_ptr = _Py_hashtable_steal(table, TO_PTR(key)); - int value = (int)FROM_PTR(value_ptr); - assert(value == VALUE(key)); - + assert((int)FROM_PTR(value_ptr) == VALUE(key)); assert(table->nentries == 25); + assert(_Py_hashtable_get_entry(table, TO_PTR(key)) == NULL); // Test _Py_hashtable_foreach() int count = 0; @@ -142,6 +145,7 @@ test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) // Test _Py_hashtable_clear() _Py_hashtable_clear(table); assert(table->nentries == 0); + assert(table->nbuckets > 0); 
assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL); _Py_hashtable_destroy(table); diff --git a/Python/hashtable.c b/Python/hashtable.c index 45c52859ac2d6c..b92e8ca08c7e1c 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -60,7 +60,7 @@ ((_Py_hashtable_entry_t *)_Py_SLIST_ITEM_NEXT(ENTRY)) /* Forward declaration */ -static void hashtable_rehash(_Py_hashtable_t *ht); +static int hashtable_rehash(_Py_hashtable_t *ht); static void _Py_slist_init(_Py_slist_t *list) @@ -198,6 +198,7 @@ _Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) ht->alloc.free(entry); if ((float)ht->nentries / (float)ht->nbuckets < HASHTABLE_LOW) { + // Ignore failure: error cannot be reported to the caller hashtable_rehash(ht); } return value; @@ -228,13 +229,17 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value) entry->key = (void *)key; entry->value = value; - size_t index = entry->key_hash & (ht->nbuckets - 1); - _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); ht->nentries++; - if ((float)ht->nentries / (float)ht->nbuckets > HASHTABLE_HIGH) { - hashtable_rehash(ht); + if (hashtable_rehash(ht) < 0) { + ht->nentries--; + ht->alloc.free(entry); + return -1; + } } + + size_t index = entry->key_hash & (ht->nbuckets - 1); + _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); return 0; } @@ -271,19 +276,19 @@ _Py_hashtable_foreach(_Py_hashtable_t *ht, } -static void +static int hashtable_rehash(_Py_hashtable_t *ht) { size_t new_size = round_size((size_t)(ht->nentries * HASHTABLE_REHASH_FACTOR)); if (new_size == ht->nbuckets) { - return; + return 0; } size_t buckets_size = new_size * sizeof(ht->buckets[0]); _Py_slist_t *new_buckets = ht->alloc.malloc(buckets_size); if (new_buckets == NULL) { /* memory allocation failed */ - return; + return -1; } memset(new_buckets, 0, buckets_size); @@ -303,6 +308,7 @@ hashtable_rehash(_Py_hashtable_t *ht) ht->alloc.free(ht->buckets); ht->nbuckets = new_size; ht->buckets = new_buckets; + return 
0; } @@ -388,7 +394,9 @@ _Py_hashtable_clear(_Py_hashtable_t *ht) _Py_slist_init(&ht->buckets[i]); } ht->nentries = 0; - hashtable_rehash(ht); + // Ignore failure: clear function is not expected to fail + // because of a memory allocation failure. + (void)hashtable_rehash(ht); } From 75d7257b201a56f950c20cd9f5753a83fff4742b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filipe=20La=C3=ADns?= Date: Thu, 14 May 2020 23:11:40 +0100 Subject: [PATCH 102/115] bpo-40548: GitHub Action workflow: skip jobs on doc only PRs (GH-19983) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Filipe Laíns --- .github/workflows/build.yml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6e6a6d2b789d34..dabfb79e9dcea1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -16,9 +16,27 @@ on: - 3.7 jobs: + check_source: + name: 'Check for source changes' + runs-on: ubuntu-latest + outputs: + run_tests: ${{ steps.check.outputs.run_tests }} + steps: + - uses: actions/checkout@v2 + - name: Check for source changes + id: check + run: | + if [ -z "GITHUB_BASE_REF" ]; then + echo '::set-output name=run_tests::true' + else + git fetch origin $GITHUB_BASE_REF --depth=1 + git diff --name-only origin/$GITHUB_BASE_REF... 
| grep -qvE '(\.rst$|^Doc|^Misc)' && echo '::set-output name=run_tests::true' + fi build_win32: name: 'Windows (x86)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout@v1 - name: Build CPython @@ -31,6 +49,8 @@ jobs: build_win_amd64: name: 'Windows (x64)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout@v1 - name: Build CPython @@ -43,6 +63,8 @@ jobs: build_macos: name: 'macOS' runs-on: macos-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout@v1 - name: Configure CPython @@ -57,6 +79,8 @@ jobs: build_ubuntu: name: 'Ubuntu' runs-on: ubuntu-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' env: OPENSSL_VER: 1.1.1f steps: From edf2643bbb9859403239fe1cb3c212b1a2a8e65c Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Fri, 15 May 2020 00:51:51 +0200 Subject: [PATCH 103/115] bpo-40460: Fix typo in idlelib/zzdummy.py (GH-20093) Replace ztest with ztext. 
--- Lib/idlelib/zzdummy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/idlelib/zzdummy.py b/Lib/idlelib/zzdummy.py index 8084499646653d..3c4b1d23b0d379 100644 --- a/Lib/idlelib/zzdummy.py +++ b/Lib/idlelib/zzdummy.py @@ -28,7 +28,7 @@ def z_in_event(self, event): text = self.text text.undo_block_start() for line in range(1, text.index('end')): - text.insert('%d.0', ztest) + text.insert('%d.0', ztext) text.undo_block_stop() return "break" From 4b972faf605912092013a1fdbf486c498d002926 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Fri, 15 May 2020 01:02:10 +0200 Subject: [PATCH 104/115] bpo-40462: Fix typo in test_json (GH-20094) --- Lib/test/test_json/test_recursion.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/test/test_json/test_recursion.py b/Lib/test/test_json/test_recursion.py index 877dc448b14c15..543c62839b2cdd 100644 --- a/Lib/test/test_json/test_recursion.py +++ b/Lib/test/test_json/test_recursion.py @@ -52,7 +52,7 @@ def default(self, o): return [JSONTestObject] else: return 'JSONTestObject' - return pyjson.JSONEncoder.default(o) + return self.json.JSONEncoder.default(o) enc = RecursiveJSONEncoder() self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"') From 7ba1f75f3f02b4b50ac6d7e17d15e467afa36aac Mon Sep 17 00:00:00 2001 From: Joannah Nanjekye <33177550+nanjekyejoannah@users.noreply.github.com> Date: Thu, 14 May 2020 21:59:46 -0300 Subject: [PATCH 105/115] bpo-38872: Document exec symbol for codeop.compile_command (GH-20047) * Document exec symbol for codeop.compile_command * Remove extra statements Co-authored-by: nanjekyejoannah --- Doc/library/code.rst | 4 ++-- Doc/library/codeop.rst | 5 +++-- Lib/codeop.py | 3 ++- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/Doc/library/code.rst b/Doc/library/code.rst index 6708079f778c1a..538e5afc7822aa 100644 --- a/Doc/library/code.rst +++ b/Doc/library/code.rst @@ -56,8 +56,8 @@ build applications which provide an interactive 
interpreter prompt. *source* is the source string; *filename* is the optional filename from which source was read, defaulting to ``''``; and *symbol* is the optional - grammar start symbol, which should be either ``'single'`` (the default) or - ``'eval'``. + grammar start symbol, which should be ``'single'`` (the default), ``'eval'`` + or ``'exec'``. Returns a code object (the same as ``compile(source, filename, symbol)``) if the command is complete and valid; ``None`` if the command is incomplete; raises diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst index a52d2c62c4fea1..c66b9d3ec0a26d 100644 --- a/Doc/library/codeop.rst +++ b/Doc/library/codeop.rst @@ -43,8 +43,9 @@ To do just the former: :exc:`OverflowError` or :exc:`ValueError` if there is an invalid literal. The *symbol* argument determines whether *source* is compiled as a statement - (``'single'``, the default) or as an :term:`expression` (``'eval'``). Any - other value will cause :exc:`ValueError` to be raised. + (``'single'``, the default), as a sequence of statements (``'exec'``) or + as an :term:`expression` (``'eval'``). Any other value will + cause :exc:`ValueError` to be raised. .. 
note:: diff --git a/Lib/codeop.py b/Lib/codeop.py index 082285f94fe847..835e68c09ba272 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -112,7 +112,8 @@ def compile_command(source, filename="", symbol="single"): source -- the source string; may contain \n characters filename -- optional filename from which source was read; default "" - symbol -- optional grammar start symbol; "single" (default) or "eval" + symbol -- optional grammar start symbol; "single" (default), "exec" + or "eval" Return value / exceptions raised: From 16ab07063cb564c1937714bd39d6915172f005b5 Mon Sep 17 00:00:00 2001 From: Pablo Galindo Date: Fri, 15 May 2020 02:04:52 +0100 Subject: [PATCH 106/115] bpo-40334: Correctly identify invalid target in assignment errors (GH-20076) Co-authored-by: Lysandros Nikolaou --- Grammar/python.gram | 13 ++++++++-- Lib/test/test_dictcomps.py | 2 +- Lib/test/test_generators.py | 2 +- Lib/test/test_genexps.py | 2 +- Lib/test/test_peg_parser.py | 2 +- Lib/test/test_syntax.py | 46 ++++++++++++++++++++-------------- Parser/pegen/parse.c | 50 ++++++++++++++++++++++++++----------- Parser/pegen/pegen.c | 46 ++++++++++++++++++++++++++++++++++ Parser/pegen/pegen.h | 4 +++ Python/ast.c | 13 ++++++---- 10 files changed, 136 insertions(+), 44 deletions(-) diff --git a/Grammar/python.gram b/Grammar/python.gram index 9087c7aa718b17..cca92090546265 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -640,8 +640,17 @@ invalid_assignment: RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } | a=expression ':' expression ['=' annotated_rhs] { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } - | a=expression ('=' | augassign) (yield_expr | star_expressions) { - RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot assign to %s", _PyPegen_get_expr_name(a)) } + | a=star_expressions '=' (yield_expr | star_expressions) { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + _PyPegen_get_invalid_target(a), + "cannot assign to %s", 
_PyPegen_get_expr_name(_PyPegen_get_invalid_target(a))) } + | a=star_expressions augassign (yield_expr | star_expressions) { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, + "'%s' is an illegal expression for augmented assignment", + _PyPegen_get_expr_name(a) + )} + invalid_block: | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") } invalid_comprehension: diff --git a/Lib/test/test_dictcomps.py b/Lib/test/test_dictcomps.py index 16aa651b93c46b..472e3dfa0d8a0a 100644 --- a/Lib/test/test_dictcomps.py +++ b/Lib/test/test_dictcomps.py @@ -77,7 +77,7 @@ def test_illegal_assignment(self): compile("{x: y for y, x in ((1, 2), (3, 4))} = 5", "", "exec") - with self.assertRaisesRegex(SyntaxError, "cannot assign"): + with self.assertRaisesRegex(SyntaxError, "illegal expression"): compile("{x: y for y, x in ((1, 2), (3, 4))} += 5", "", "exec") diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 1081107ee64ace..348ae15aa6532b 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -1921,7 +1921,7 @@ def printsolution(self, x): >>> def f(): (yield bar) += y Traceback (most recent call last): ... -SyntaxError: cannot assign to yield expression +SyntaxError: 'yield expression' is an illegal expression for augmented assignment Now check some throw() conditions: diff --git a/Lib/test/test_genexps.py b/Lib/test/test_genexps.py index 86e4e195f55ec5..5c1a209b0e9908 100644 --- a/Lib/test/test_genexps.py +++ b/Lib/test/test_genexps.py @@ -158,7 +158,7 @@ >>> (y for y in (1,2)) += 10 Traceback (most recent call last): ... 
- SyntaxError: cannot assign to generator expression + SyntaxError: 'generator expression' is an illegal expression for augmented assignment ########### Tests borrowed from or inspired by test_generators.py ############ diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index 71e071940de2f4..9614e45799dd8c 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -625,7 +625,7 @@ def f(): ("(a, b): int", "only single target (not tuple) can be annotated"), ("[a, b]: int", "only single target (not list) can be annotated"), ("a(): int", "illegal target for annotation"), - ("1 += 1", "cannot assign to literal"), + ("1 += 1", "'literal' is an illegal expression for augmented assignment"), ("pass\n pass", "unexpected indent"), ("def f():\npass", "expected an indented block"), ("def f(*): pass", "named arguments must follow bare *"), diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index a3a101534628a2..60c7d9fd3868e8 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -100,30 +100,37 @@ This test just checks a couple of cases rather than enumerating all of them. 
-# All of the following also produce different error messages with pegen -# >>> (a, "b", c) = (1, 2, 3) -# Traceback (most recent call last): -# SyntaxError: cannot assign to literal +>>> (a, "b", c) = (1, 2, 3) +Traceback (most recent call last): +SyntaxError: cannot assign to literal -# >>> (a, True, c) = (1, 2, 3) -# Traceback (most recent call last): -# SyntaxError: cannot assign to True +>>> (a, True, c) = (1, 2, 3) +Traceback (most recent call last): +SyntaxError: cannot assign to True >>> (a, __debug__, c) = (1, 2, 3) Traceback (most recent call last): SyntaxError: cannot assign to __debug__ -# >>> (a, *True, c) = (1, 2, 3) -# Traceback (most recent call last): -# SyntaxError: cannot assign to True +>>> (a, *True, c) = (1, 2, 3) +Traceback (most recent call last): +SyntaxError: cannot assign to True >>> (a, *__debug__, c) = (1, 2, 3) Traceback (most recent call last): SyntaxError: cannot assign to __debug__ -# >>> [a, b, c + 1] = [1, 2, 3] -# Traceback (most recent call last): -# SyntaxError: cannot assign to operator +>>> [a, b, c + 1] = [1, 2, 3] +Traceback (most recent call last): +SyntaxError: cannot assign to operator + +>>> [a, b[1], c + 1] = [1, 2, 3] +Traceback (most recent call last): +SyntaxError: cannot assign to operator + +>>> [a, b.c.d, c + 1] = [1, 2, 3] +Traceback (most recent call last): +SyntaxError: cannot assign to operator >>> a if 1 else b = 1 Traceback (most recent call last): @@ -131,15 +138,15 @@ >>> a, b += 1, 2 Traceback (most recent call last): -SyntaxError: invalid syntax +SyntaxError: 'tuple' is an illegal expression for augmented assignment >>> (a, b) += 1, 2 Traceback (most recent call last): -SyntaxError: cannot assign to tuple +SyntaxError: 'tuple' is an illegal expression for augmented assignment >>> [a, b] += 1, 2 Traceback (most recent call last): -SyntaxError: cannot assign to list +SyntaxError: 'list' is an illegal expression for augmented assignment From compiler_complex_args(): @@ -346,16 +353,16 @@ >>> (x for x in x) 
+= 1 Traceback (most recent call last): -SyntaxError: cannot assign to generator expression +SyntaxError: 'generator expression' is an illegal expression for augmented assignment >>> None += 1 Traceback (most recent call last): -SyntaxError: cannot assign to None +SyntaxError: 'None' is an illegal expression for augmented assignment >>> __debug__ += 1 Traceback (most recent call last): SyntaxError: cannot assign to __debug__ >>> f() += 1 Traceback (most recent call last): -SyntaxError: cannot assign to function call +SyntaxError: 'function call' is an illegal expression for augmented assignment Test continue in finally in weird combinations. @@ -688,6 +695,7 @@ def _check_error(self, code, errtext, def test_assign_call(self): self._check_error("f() = 1", "assign") + @unittest.skipIf(support.use_old_parser(), "The old parser cannot generate these error messages") def test_assign_del(self): self._check_error("del (,)", "invalid syntax") self._check_error("del 1", "delete literal") diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 851d17226d162f..f4c5692212768d 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -10747,7 +10747,8 @@ invalid_named_expression_rule(Parser *p) // | tuple ':' // | star_named_expression ',' star_named_expressions* ':' // | expression ':' expression ['=' annotated_rhs] -// | expression ('=' | augassign) (yield_expr | star_expressions) +// | star_expressions '=' (yield_expr | star_expressions) +// | star_expressions augassign (yield_expr | star_expressions) static void * invalid_assignment_rule(Parser *p) { @@ -10841,19 +10842,40 @@ invalid_assignment_rule(Parser *p) } p->mark = _mark; } - { // expression ('=' | augassign) (yield_expr | star_expressions) + { // star_expressions '=' (yield_expr | star_expressions) + Token * _literal; void *_tmp_128_var; + expr_ty a; + if ( + (a = star_expressions_rule(p)) // star_expressions + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (_tmp_128_var = 
_tmp_128_rule(p)) // yield_expr | star_expressions + ) + { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( _PyPegen_get_invalid_target ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( _PyPegen_get_invalid_target ( a ) ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + { // star_expressions augassign (yield_expr | star_expressions) void *_tmp_129_var; expr_ty a; + AugOperator* augassign_var; if ( - (a = expression_rule(p)) // expression + (a = star_expressions_rule(p)) // star_expressions && - (_tmp_128_var = _tmp_128_rule(p)) // '=' | augassign + (augassign_var = augassign_rule(p)) // augassign && (_tmp_129_var = _tmp_129_rule(p)) // yield_expr | star_expressions ) { - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "'%s' is an illegal expression for augmented assignment" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; @@ -16675,7 +16697,7 @@ _tmp_127_rule(Parser *p) return _res; } -// _tmp_128: '=' | augassign +// _tmp_128: yield_expr | star_expressions static void * _tmp_128_rule(Parser *p) { @@ -16684,24 +16706,24 @@ _tmp_128_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // '=' - Token * _literal; + { // yield_expr + expr_ty yield_expr_var; if ( - (_literal = _PyPegen_expect_token(p, 22)) // token='=' + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - _res = _literal; + _res = yield_expr_var; goto done; } p->mark = _mark; } - { // augassign - AugOperator* augassign_var; + { // star_expressions + expr_ty star_expressions_var; if ( - (augassign_var = augassign_rule(p)) // augassign + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - _res = augassign_var; + _res = star_expressions_var; goto done; } p->mark = _mark; diff --git a/Parser/pegen/pegen.c 
b/Parser/pegen/pegen.c index 8b79a7364758e4..7f3e4561de6055 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -2054,3 +2054,49 @@ _PyPegen_make_module(Parser *p, asdl_seq *a) { } return Module(a, type_ignores, p->arena); } + +// Error reporting helpers + +expr_ty +_PyPegen_get_invalid_target(expr_ty e) +{ + if (e == NULL) { + return NULL; + } + +#define VISIT_CONTAINER(CONTAINER, TYPE) do { \ + Py_ssize_t len = asdl_seq_LEN(CONTAINER->v.TYPE.elts);\ + for (Py_ssize_t i = 0; i < len; i++) {\ + expr_ty other = asdl_seq_GET(CONTAINER->v.TYPE.elts, i);\ + expr_ty child = _PyPegen_get_invalid_target(other);\ + if (child != NULL) {\ + return child;\ + }\ + }\ + } while (0) + + // We only need to visit List and Tuple nodes recursively as those + // are the only ones that can contain valid names in targets when + // they are parsed as expressions. Any other kind of expression + // that is a container (like Sets or Dicts) is directly invalid and + // we don't need to visit it recursively. 
+ + switch (e->kind) { + case List_kind: { + VISIT_CONTAINER(e, List); + return NULL; + } + case Tuple_kind: { + VISIT_CONTAINER(e, Tuple); + return NULL; + } + case Starred_kind: + return _PyPegen_get_invalid_target(e->v.Starred.value); + case Name_kind: + case Subscript_kind: + case Attribute_kind: + return NULL; + default: + return e; + } +} \ No newline at end of file diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index e5b1b757bd894b..b9d4c048bb52b0 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -260,6 +260,10 @@ void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); int _PyPegen_check_barry_as_flufl(Parser *); mod_ty _PyPegen_make_module(Parser *, asdl_seq *); +// Error reporting helpers + +expr_ty _PyPegen_get_invalid_target(expr_ty e); + void *_PyPegen_parse(Parser *); #endif diff --git a/Python/ast.c b/Python/ast.c index 1a4a3110e69559..2d20ca62aa8378 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -3164,10 +3164,7 @@ ast_for_expr_stmt(struct compiling *c, const node *n) expr1 = ast_for_testlist(c, ch); if (!expr1) return NULL; - if(!set_context(c, expr1, Store, ch)) - return NULL; - /* set_context checks that most expressions are not the left side. - Augmented assignments can only have a name, a subscript, or an + /* Augmented assignments can only have a name, a subscript, or an attribute on the left, though, so we have to explicitly check for those. */ switch (expr1->kind) { @@ -3176,10 +3173,16 @@ ast_for_expr_stmt(struct compiling *c, const node *n) case Subscript_kind: break; default: - ast_error(c, ch, "illegal expression for augmented assignment"); + ast_error(c, ch, "'%s' is an illegal expression for augmented assignment", + get_expr_name(expr1)); return NULL; } + /* set_context checks that most expressions are not the left side. 
*/ + if(!set_context(c, expr1, Store, ch)) { + return NULL; + } + ch = CHILD(n, 2); if (TYPE(ch) == testlist) expr2 = ast_for_testlist(c, ch); From 6a78589b6b22878491a4b042bb8b3161e1d120f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filipe=20La=C3=ADns?= Date: Fri, 15 May 2020 03:08:21 +0100 Subject: [PATCH 107/115] bpo-40548: github actions: pass the changes check on no source changes (GH-20097) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Filipe Laíns --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index dabfb79e9dcea1..dbef550643e813 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -30,7 +30,7 @@ jobs: echo '::set-output name=run_tests::true' else git fetch origin $GITHUB_BASE_REF --depth=1 - git diff --name-only origin/$GITHUB_BASE_REF... | grep -qvE '(\.rst$|^Doc|^Misc)' && echo '::set-output name=run_tests::true' + git diff --name-only origin/$GITHUB_BASE_REF... | grep -qvE '(\.rst$|^Doc|^Misc)' && echo '::set-output name=run_tests::true' || true fi build_win32: name: 'Windows (x86)' From 1aa8767baf498a920f0461d1088772a12dcb4d20 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Thu, 14 May 2020 19:11:00 -0700 Subject: [PATCH 108/115] Update code comment re: location of struct _is. 
(GH-20067) --- Include/pystate.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Include/pystate.h b/Include/pystate.h index 34cad02c3a930d..bae440778b261a 100644 --- a/Include/pystate.h +++ b/Include/pystate.h @@ -18,7 +18,7 @@ struct _is; /* struct _ts is defined in cpython/pystate.h */ typedef struct _ts PyThreadState; -/* struct _is is defined in internal/pycore_pystate.h */ +/* struct _is is defined in internal/pycore_interp.h */ typedef struct _is PyInterpreterState; PyAPI_FUNC(PyInterpreterState *) PyInterpreterState_New(void); From 15bc9ab301d73f20bff47a12ef05326feb40f797 Mon Sep 17 00:00:00 2001 From: Guido van Rossum Date: Thu, 14 May 2020 19:22:48 -0700 Subject: [PATCH 109/115] bpo-40612: Fix SyntaxError edge cases in traceback formatting (GH-20072) This fixes both the traceback.py module and the C code for formatting syntax errors (in Python/pythonrun.c). They now both consistently do the following: - Suppress caret if it points left of text - Allow caret pointing just past end of line - If caret points past end of line, clip to *just* past end of line The syntax error formatting code in traceback.py was mostly rewritten; small, subtle changes were applied to the C code in pythonrun.c. There's still a difference when the text contains embedded newlines. Neither handles these very well, and I don't think the case occurs in practice. 
Automerge-Triggered-By: @gvanrossum --- Lib/test/test_cmd_line_script.py | 2 +- Lib/test/test_traceback.py | 34 +++++++--- Lib/traceback.py | 29 ++++---- .../2020-05-13-10-23-29.bpo-40612.gOIreM.rst | 2 + Python/pythonrun.c | 66 +++++++++++++------ 5 files changed, 94 insertions(+), 39 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index 171340581af228..15fca7b8a5191e 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -633,7 +633,7 @@ def test_syntaxerror_multi_line_fstring(self): stderr.splitlines()[-3:], [ b' foo"""', - b' ^', + b' ^', b'SyntaxError: f-string: empty expression not allowed', ], ) diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 7361d091cfbbef..f9a5f2fc53e1e9 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -58,13 +58,13 @@ def test_caret(self): SyntaxError) self.assertIn("^", err[2]) # third line has caret self.assertEqual(err[2].count('\n'), 1) # and no additional newline - self.assertEqual(err[1].find("+"), err[2].find("^")) # in the right place + self.assertEqual(err[1].find("+") + 1, err[2].find("^")) # in the right place err = self.get_exception_format(self.syntax_error_with_caret_non_ascii, SyntaxError) self.assertIn("^", err[2]) # third line has caret self.assertEqual(err[2].count('\n'), 1) # and no additional newline - self.assertEqual(err[1].find("+"), err[2].find("^")) # in the right place + self.assertEqual(err[1].find("+") + 1, err[2].find("^")) # in the right place def test_nocaret(self): exc = SyntaxError("error", ("x.py", 23, None, "bad syntax")) @@ -78,14 +78,13 @@ def test_bad_indentation(self): self.assertEqual(len(err), 4) self.assertEqual(err[1].strip(), "print(2)") self.assertIn("^", err[2]) - self.assertEqual(err[1].find(")"), err[2].find("^")) + self.assertEqual(err[1].find(")") + 1, err[2].find("^")) + # 
No caret for "unexpected indent" err = self.get_exception_format(self.syntax_error_bad_indentation2, IndentationError) - self.assertEqual(len(err), 4) + self.assertEqual(len(err), 3) self.assertEqual(err[1].strip(), "print(2)") - self.assertIn("^", err[2]) - self.assertEqual(err[1].find("p"), err[2].find("^")) def test_base_exception(self): # Test that exceptions derived from BaseException are formatted right @@ -656,7 +655,7 @@ def outer_raise(): self.assertIn('inner_raise() # Marker', blocks[2]) self.check_zero_div(blocks[2]) - @support.skip_if_new_parser("Pegen is arguably better here, so no need to fix this") + @unittest.skipIf(support.use_old_parser(), "Pegen is arguably better here, so no need to fix this") def test_syntax_error_offset_at_eol(self): # See #10186. def e(): @@ -666,7 +665,7 @@ def e(): def e(): exec("x = 5 | 4 |") msg = self.get_report(e).splitlines() - self.assertEqual(msg[-2], ' ^') + self.assertEqual(msg[-2], ' ^') def test_message_none(self): # A message that looks like "None" should not be treated specially @@ -679,6 +678,25 @@ def test_message_none(self): err = self.get_report(Exception('')) self.assertIn('Exception\n', err) + def test_syntax_error_various_offsets(self): + for offset in range(-5, 10): + for add in [0, 2]: + text = " "*add + "text%d" % offset + expected = [' File "file.py", line 1'] + if offset < 1: + expected.append(" %s" % text.lstrip()) + elif offset <= 6: + expected.append(" %s" % text.lstrip()) + expected.append(" %s^" % (" "*(offset-1))) + else: + expected.append(" %s" % text.lstrip()) + expected.append(" %s^" % (" "*5)) + expected.append("SyntaxError: msg") + expected.append("") + err = self.get_report(SyntaxError("msg", ("file.py", 1, offset+add, text))) + exp = "\n".join(expected) + self.assertEqual(exp, err) + class PyExcReportingTests(BaseExceptionReportingTests, unittest.TestCase): # diff --git a/Lib/traceback.py b/Lib/traceback.py index bf34bbab8a1629..a19e38718b1205 100644 --- a/Lib/traceback.py +++ 
b/Lib/traceback.py @@ -569,23 +569,30 @@ def format_exception_only(self): if not issubclass(self.exc_type, SyntaxError): yield _format_final_exc_line(stype, self._str) - return + else: + yield from self._format_syntax_error(stype) - # It was a syntax error; show exactly where the problem was found. + def _format_syntax_error(self, stype): + """Format SyntaxError exceptions (internal helper).""" + # Show exactly where the problem was found. filename = self.filename or "" lineno = str(self.lineno) or '?' yield ' File "{}", line {}\n'.format(filename, lineno) - badline = self.text - offset = self.offset - if badline is not None: - yield ' {}\n'.format(badline.strip()) - if offset is not None: - caretspace = badline.rstrip('\n') - offset = min(len(caretspace), offset) - 1 - caretspace = caretspace[:offset].lstrip() + text = self.text + if text is not None: + # text = " foo\n" + # rtext = " foo" + # ltext = "foo" + rtext = text.rstrip('\n') + ltext = rtext.lstrip(' \n\f') + spaces = len(rtext) - len(ltext) + yield ' {}\n'.format(ltext) + # Convert 1-based column offset to 0-based index into stripped text + caret = (self.offset or 0) - 1 - spaces + if caret >= 0: # non-space whitespace (likes tabs) must be kept for alignment - caretspace = ((c.isspace() and c or ' ') for c in caretspace) + caretspace = ((c if c.isspace() else ' ') for c in ltext[:caret]) yield ' {}^\n'.format(''.join(caretspace)) msg = self.msg or "" yield "{}: {}\n".format(stype, msg) diff --git a/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst b/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst new file mode 100644 index 00000000000000..32cc8073d3f79c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst @@ -0,0 +1,2 @@ +Fix edge cases in SyntaxError formatting. If the offset is <= 0, no caret is printed. +If the offset is > line length, the caret is printed pointing just after the last character. 
diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 45f08b707eb999..160f44d38e2e19 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -554,37 +554,65 @@ parse_syntax_error(PyObject *err, PyObject **message, PyObject **filename, static void print_error_text(PyObject *f, int offset, PyObject *text_obj) { - const char *text; - const char *nl; - - text = PyUnicode_AsUTF8(text_obj); + /* Convert text to a char pointer; return if error */ + const char *text = PyUnicode_AsUTF8(text_obj); if (text == NULL) return; - if (offset >= 0) { - if (offset > 0 && (size_t)offset == strlen(text) && text[offset - 1] == '\n') - offset--; - for (;;) { - nl = strchr(text, '\n'); - if (nl == NULL || nl-text >= offset) - break; - offset -= (int)(nl+1-text); - text = nl+1; + /* Convert offset from 1-based to 0-based */ + offset--; + + /* Strip leading whitespace from text, adjusting offset as we go */ + while (*text == ' ' || *text == '\t' || *text == '\f') { + text++; + offset--; + } + + /* Calculate text length excluding trailing newline */ + Py_ssize_t len = strlen(text); + if (len > 0 && text[len-1] == '\n') { + len--; + } + + /* Clip offset to at most len */ + if (offset > len) { + offset = len; + } + + /* Skip past newlines embedded in text */ + for (;;) { + const char *nl = strchr(text, '\n'); + if (nl == NULL) { + break; } - while (*text == ' ' || *text == '\t' || *text == '\f') { - text++; - offset--; + Py_ssize_t inl = nl - text; + if (inl >= (Py_ssize_t)offset) { + break; } + inl += 1; + text += inl; + len -= inl; + offset -= (int)inl; } + + /* Print text */ PyFile_WriteString(" ", f); PyFile_WriteString(text, f); - if (*text == '\0' || text[strlen(text)-1] != '\n') + + /* Make sure there's a newline at the end */ + if (text[len] != '\n') { PyFile_WriteString("\n", f); - if (offset == -1) + } + + /* Don't print caret if it points to the left of the text */ + if (offset < 0) return; + + /* Write caret line */ PyFile_WriteString(" ", f); - while (--offset > 0) + 
while (--offset >= 0) { PyFile_WriteString(" ", f); + } PyFile_WriteString("^\n", f); } From 4a12d121860fb60d56cdcc212817577cac2356d0 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Thu, 14 May 2020 19:25:45 -0700 Subject: [PATCH 110/115] Fix typo in code comment in main_loop label. (GH-20068) --- Python/ceval.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Python/ceval.c b/Python/ceval.c index 699ad86a365b18..43ea1c760b17e9 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -1396,7 +1396,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, PyFrameObject *f, int throwflag) /* Do periodic things. Doing this every time through the loop would add too much overhead, so we do it only every Nth instruction. We also do it if - ``pendingcalls_to_do'' is set, i.e. when an asynchronous + ``pending.calls_to_do'' is set, i.e. when an asynchronous event needs attention (e.g. a signal handler or async I/O handler); see Py_AddPendingCall() and Py_MakePendingCalls() above. */ From 003708bcf8f2c58d4b65f68318acf164d713e008 Mon Sep 17 00:00:00 2001 From: Andrew York Date: Fri, 15 May 2020 03:43:58 -0700 Subject: [PATCH 111/115] Trivial typo fix in _tkinter.c (GH-19622) Change spelling of a #define in _tkinter.c from HAVE_LIBTOMMAMTH to HAVE_LIBTOMMATH, since this is used to keep track of tclTomMath.h, not tclTomMamth.h. No other file seems to refer to this variable. --- Modules/_tkinter.c | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index f530c5b0eb7b69..793c5e71548846 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -54,7 +54,7 @@ Copyright (C) 1994 Steen Lumholt. 
#if TK_HEX_VERSION >= 0x08050208 && TK_HEX_VERSION < 0x08060000 || \ TK_HEX_VERSION >= 0x08060200 -#define HAVE_LIBTOMMAMTH +#define HAVE_LIBTOMMATH #include #endif @@ -965,7 +965,7 @@ static PyType_Spec PyTclObject_Type_spec = { #define CHECK_STRING_LENGTH(s) #endif -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH static Tcl_Obj* asBignumObj(PyObject *value) { @@ -1045,7 +1045,7 @@ AsObj(PyObject *value) #endif /* If there is an overflow in the wideInt conversion, fall through to bignum handling. */ -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH return asBignumObj(value); #endif /* If there is no wideInt or bignum support, @@ -1167,7 +1167,7 @@ fromWideIntObj(TkappObject *tkapp, Tcl_Obj *value) return NULL; } -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH static PyObject* fromBignumObj(TkappObject *tkapp, Tcl_Obj *value) { @@ -1247,7 +1247,7 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) fall through to bignum handling. */ } -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH if (value->typePtr == tkapp->IntType || value->typePtr == tkapp->WideIntType || value->typePtr == tkapp->BignumType) { @@ -1300,7 +1300,7 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) } #endif -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH if (tkapp->BignumType == NULL && strcmp(value->typePtr->name, "bignum") == 0) { /* bignum type is not registered in Tcl */ @@ -2001,7 +2001,7 @@ _tkinter_tkapp_getint(TkappObject *self, PyObject *arg) Prefer bignum because Tcl_GetWideIntFromObj returns ambiguous result for value in ranges -2**64..-2**63-1 and 2**63..2**64-1 (on 32-bit platform). 
*/ -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH result = fromBignumObj(self, value); #else result = fromWideIntObj(self, value); From 6e57237faf0da8904e0130a11350cae3c5062b82 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Fri, 15 May 2020 18:06:23 +0200 Subject: [PATCH 112/115] bpo-40055: test_distutils leaves warnings filters unchanged (GH-20095) distutils.tests now saves/restores warnings filters to leave them unchanged. Importing tests imports docutils which imports pkg_resources which adds a warnings filter. --- Lib/distutils/tests/__init__.py | 6 ++++++ .../next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst | 3 +++ 2 files changed, 9 insertions(+) create mode 100644 Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py index 1b939cbd5db2bf..5d2e69e3e6a8f6 100644 --- a/Lib/distutils/tests/__init__.py +++ b/Lib/distutils/tests/__init__.py @@ -15,6 +15,7 @@ import os import sys import unittest +import warnings from test.support import run_unittest @@ -22,6 +23,7 @@ def test_suite(): + old_filters = warnings.filters[:] suite = unittest.TestSuite() for fn in os.listdir(here): if fn.startswith("test") and fn.endswith(".py"): @@ -29,6 +31,10 @@ def test_suite(): __import__(modname) module = sys.modules[modname] suite.addTest(module.test_suite()) + # bpo-40055: Save/restore warnings filters to leave them unchanged. + # Importing tests imports docutils which imports pkg_resources which adds a + # warnings filter. + warnings.filters[:] = old_filters return suite diff --git a/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst new file mode 100644 index 00000000000000..edb01182c3a5ce --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst @@ -0,0 +1,3 @@ +distutils.tests now saves/restores warnings filters to leave them unchanged. 
+Importing tests imports docutils which imports pkg_resources which adds a +warnings filter. From 16d4e6f6f559b4fd21c9d29fea303489f658674f Mon Sep 17 00:00:00 2001 From: Christian Heimes Date: Fri, 15 May 2020 18:28:05 +0200 Subject: [PATCH 113/115] bpo-40479: Fix hashlib issue with OpenSSL 3.0.0 (GH-20107) OpenSSL 3.0.0-alpha2 was released today. The FIPS_mode() function has been deprecated and removed. It no longer makes sense with the new provider and context system in OpenSSL 3.0.0. EVP_default_properties_is_fips_enabled() is good enough for our needs in unit tests. It's an internal API, too. Signed-off-by: Christian Heimes --- .../2020-05-15-17-38-21.bpo-40479.yamSCh.rst | 1 + Modules/_hashopenssl.c | 15 +++++++++++---- Modules/clinic/_hashopenssl.c.h | 10 ++++++---- 3 files changed, 18 insertions(+), 8 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst diff --git a/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst b/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst new file mode 100644 index 00000000000000..87ede982f29677 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst @@ -0,0 +1 @@ +The :mod:`hashlib` now compiles with OpenSSL 3.0.0-alpha2. diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 91834e5330f4bd..b7661b40d0a7ba 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -1109,19 +1109,25 @@ _hashlib.get_fips_mode -> int Determine the OpenSSL FIPS mode of operation. +For OpenSSL 3.0.0 and newer it returns the state of the default provider +in the default OSSL context. It's not quite the same as FIPS_mode() but good +enough for unittests. + Effectively any non-zero return value indicates FIPS mode; values other than 1 may have additional significance. - -See OpenSSL documentation for the FIPS_mode() function for details. 
[clinic start generated code]*/ static int _hashlib_get_fips_mode_impl(PyObject *module) -/*[clinic end generated code: output=87eece1bab4d3fa9 input=c2799c3132a36d6c]*/ +/*[clinic end generated code: output=87eece1bab4d3fa9 input=2db61538c41c6fef]*/ { + int result; +#if OPENSSL_VERSION_NUMBER >= 0x30000000L + result = EVP_default_properties_is_fips_enabled(NULL); +#else ERR_clear_error(); - int result = FIPS_mode(); + result = FIPS_mode(); if (result == 0) { // "If the library was built without support of the FIPS Object Module, // then the function will return 0 with an error code of @@ -1134,6 +1140,7 @@ _hashlib_get_fips_mode_impl(PyObject *module) } } return result; +#endif } #endif // !LIBRESSL_VERSION_NUMBER diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index 275784dcdcd0e9..1b0c6d0ce43d27 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -733,10 +733,12 @@ PyDoc_STRVAR(_hashlib_get_fips_mode__doc__, "\n" "Determine the OpenSSL FIPS mode of operation.\n" "\n" -"Effectively any non-zero return value indicates FIPS mode;\n" -"values other than 1 may have additional significance.\n" +"For OpenSSL 3.0.0 and newer it returns the state of the default provider\n" +"in the default OSSL context. 
It\'s not quite the same as FIPS_mode() but good\n" +"enough for unittests.\n" "\n" -"See OpenSSL documentation for the FIPS_mode() function for details."); +"Effectively any non-zero return value indicates FIPS mode;\n" +"values other than 1 may have additional significance."); #define _HASHLIB_GET_FIPS_MODE_METHODDEF \ {"get_fips_mode", (PyCFunction)_hashlib_get_fips_mode, METH_NOARGS, _hashlib_get_fips_mode__doc__}, @@ -769,4 +771,4 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF #define _HASHLIB_GET_FIPS_MODE_METHODDEF #endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */ -/*[clinic end generated code: output=b0703dd5a043394d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4babbd88389a196b input=a9049054013a1b77]*/ From 62d618c06bd395308b7163dbcb26c7e6d0922033 Mon Sep 17 00:00:00 2001 From: Christian Heimes Date: Fri, 15 May 2020 18:48:25 +0200 Subject: [PATCH 114/115] bpo-40479: Test with latest OpenSSL versions (GH-20108) * 1.0.2u (EOL) * 1.1.0l (EOL) * 1.1.1g * 3.0.0-alpha2 (disabled for now) Build the FIPS provider and create a FIPS configuration file for OpenSSL 3.0.0. Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran --- .../2020-05-15-17-48-25.bpo-40479.B1gBl-.rst | 2 + Tools/ssl/multissltests.py | 61 ++++++++++++++++++- 2 files changed, 60 insertions(+), 3 deletions(-) create mode 100644 Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst new file mode 100644 index 00000000000000..b59035971d7b08 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst @@ -0,0 +1,2 @@ +Update multissltest helper to test with latest OpenSSL 1.0.2, 1.1.0, 1.1.1, +and 3.0.0-alpha. 
diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 05d6d7de296db9..7aa28bd2157fb3 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -41,13 +41,13 @@ log = logging.getLogger("multissl") OPENSSL_OLD_VERSIONS = [ - "1.0.2", ] OPENSSL_RECENT_VERSIONS = [ - "1.0.2t", + "1.0.2u", "1.1.0l", - "1.1.1f", + "1.1.1g", + # "3.0.0-alpha2" ] LIBRESSL_OLD_VERSIONS = [ @@ -143,6 +143,23 @@ help="Keep original sources for debugging." ) +OPENSSL_FIPS_CNF = """\ +openssl_conf = openssl_init + +.include {self.install_dir}/ssl/fipsinstall.cnf +# .include {self.install_dir}/ssl/openssl.cnf + +[openssl_init] +providers = provider_sect + +[provider_sect] +fips = fips_sect +default = default_sect + +[default_sect] +activate = 1 +""" + class AbstractBuilder(object): library = None @@ -291,9 +308,13 @@ def _make_install(self): ["make", "-j1", self.install_target], cwd=self.build_dir ) + self._post_install() if not self.args.keep_sources: shutil.rmtree(self.build_dir) + def _post_install(self): + pass + def install(self): log.info(self.openssl_cli) if not self.has_openssl or self.args.force: @@ -365,6 +386,40 @@ class BuildOpenSSL(AbstractBuilder): # only install software, skip docs install_target = 'install_sw' + def _post_install(self): + if self.version.startswith("3.0"): + self._post_install_300() + + def _post_install_300(self): + # create ssl/ subdir with example configs + self._subprocess_call( + ["make", "-j1", "install_ssldirs"], + cwd=self.build_dir + ) + # Install FIPS module + # https://wiki.openssl.org/index.php/OpenSSL_3.0#Completing_the_installation_of_the_FIPS_Module + fipsinstall_cnf = os.path.join( + self.install_dir, "ssl", "fipsinstall.cnf" + ) + openssl_fips_cnf = os.path.join( + self.install_dir, "ssl", "openssl-fips.cnf" + ) + fips_mod = os.path.join(self.lib_dir, "ossl-modules/fips.so") + self._subprocess_call( + [ + self.openssl_cli, "fipsinstall", + "-out", fipsinstall_cnf, + "-module", fips_mod, + "-provider_name", 
"fips", + "-mac_name", "HMAC", + "-macopt", "digest:SHA256", + "-macopt", "hexkey:00", + "-section_name", "fips_sect" + ] + ) + with open(openssl_fips_cnf, "w") as f: + f.write(OPENSSL_FIPS_CNF.format(self=self)) + class BuildLibreSSL(AbstractBuilder): library = "LibreSSL" From 8d39d0a8099f4aeb273b0ce00258478bb4f3a578 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Fri, 15 May 2020 21:15:01 +0300 Subject: [PATCH 115/115] Update NEWS. --- Misc/NEWS.d/3.9.0a1.rst | 2 +- .../next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst index e5b4972b1922c4..fb74d3622263d4 100644 --- a/Misc/NEWS.d/3.9.0a1.rst +++ b/Misc/NEWS.d/3.9.0a1.rst @@ -3372,7 +3372,7 @@ markup and any values in the message. Patch by Paul Ganssle .. section: Library Removed methods Element.getchildren(), Element.getiterator() and -ElementTree.getiterator(). +ElementTree.getiterator() and the xml.etree.cElementTree module. .. diff --git a/Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst b/Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst new file mode 100644 index 00000000000000..468c1ac9eee17c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst @@ -0,0 +1 @@ +Restored the deprecated :mod:`xml.etree.cElementTree` module.